hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7c88431dd8b111e4b638834dd3dd63e469bfd06c
| 32
|
py
|
Python
|
downloadNLTK.py
|
ExploreNcrack/Language-Model-Information-Retrieval
|
d7042209558e0ab5207f30660ea3fb43955f4a96
|
[
"MIT"
] | 1
|
2020-09-30T19:53:36.000Z
|
2020-09-30T19:53:36.000Z
|
downloadNLTK.py
|
ExploreNcrack/Language-Model-Information-Retrieval
|
d7042209558e0ab5207f30660ea3fb43955f4a96
|
[
"MIT"
] | null | null | null |
downloadNLTK.py
|
ExploreNcrack/Language-Model-Information-Retrieval
|
d7042209558e0ab5207f30660ea3fb43955f4a96
|
[
"MIT"
] | null | null | null |
import nltk
nltk.download("all")
| 16
| 20
| 0.78125
| 5
| 32
| 5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 32
| 2
| 20
| 16
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
7c8ef041ec4e6947688c30a7575b6dbfe1744c8b
| 1,379
|
py
|
Python
|
1-50/PE8.py
|
Skylite73/My_Project_Euler
|
8c5f7fcb5ecab90bab44e6a4d26ebc0a6779990a
|
[
"MIT"
] | null | null | null |
1-50/PE8.py
|
Skylite73/My_Project_Euler
|
8c5f7fcb5ecab90bab44e6a4d26ebc0a6779990a
|
[
"MIT"
] | null | null | null |
1-50/PE8.py
|
Skylite73/My_Project_Euler
|
8c5f7fcb5ecab90bab44e6a4d26ebc0a6779990a
|
[
"MIT"
] | null | null | null |
# Project Euler 8
import timeit
def thing():
num = str(7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450)
maxx = 0
for i in range(len(num)):
if i <= 987:
mult = 1
for j in num[i:i+13]:
mult *= int(j)
if mult > maxx:
maxx = mult
print(maxx, num[i:i+13])
print("time", timeit.timeit("thing()", globals=globals(), number=1))
| 81.117647
| 1,015
| 0.851342
| 53
| 1,379
| 22.150943
| 0.528302
| 0.006814
| 0.008518
| 0.011925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.82329
| 0.1095
| 1,379
| 16
| 1,016
| 86.1875
| 0.132736
| 0.010877
| 0
| 0
| 0
| 0
| 0.008076
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.076923
| 0
| 0.153846
| 0.153846
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7cbad17b3f5e7e98192c3c1cb1d2e2da8d281385
| 4,138
|
py
|
Python
|
e2e_tests/tests/nightly/test_capability.py
|
gh-determined-ai/determined
|
9a1ab33a3a356b69681b3351629fef4ab98ddb56
|
[
"Apache-2.0"
] | null | null | null |
e2e_tests/tests/nightly/test_capability.py
|
gh-determined-ai/determined
|
9a1ab33a3a356b69681b3351629fef4ab98ddb56
|
[
"Apache-2.0"
] | null | null | null |
e2e_tests/tests/nightly/test_capability.py
|
gh-determined-ai/determined
|
9a1ab33a3a356b69681b3351629fef4ab98ddb56
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from tests import config as conf
from tests import experiment as exp
@pytest.mark.nightly
def test_mmdetection_pytorch_const() -> None:
config = conf.load_config(conf.cv_examples_path("mmdetection_pytorch/const_fake_data.yaml"))
config = conf.set_max_length(config, {"batches": 200})
exp.run_basic_test_with_temp_config(config, conf.cv_examples_path("mmdetection_pytorch"), 1)
@pytest.mark.nightly
def test_bert_glue_const() -> None:
config = conf.load_config(conf.nlp_examples_path("bert_glue_pytorch/const.yaml"))
config = conf.set_max_length(config, {"batches": 200})
exp.run_basic_test_with_temp_config(config, conf.nlp_examples_path("bert_glue_pytorch"), 1)
@pytest.mark.nightly
def test_gaea_pytorch_const() -> None:
config = conf.load_config(conf.nas_examples_path("gaea_pytorch/eval/const.yaml"))
config = conf.set_global_batch_size(config, 32)
config = conf.set_max_length(config, {"batches": 200})
exp.run_basic_test_with_temp_config(config, conf.nas_examples_path("gaea_pytorch/eval"), 1)
@pytest.mark.nightly
def test_gan_mnist_pytorch_const() -> None:
config = conf.load_config(conf.gan_examples_path("gan_mnist_pytorch/const.yaml"))
config = conf.set_max_length(config, {"batches": 200})
exp.run_basic_test_with_temp_config(config, conf.gan_examples_path("gan_mnist_pytorch"), 1)
@pytest.mark.nightly
def test_pix2pix_facades_const() -> None:
config = conf.load_config(conf.gan_examples_path("pix2pix_tf_keras/const.yaml"))
config = conf.set_max_length(config, {"batches": 200})
exp.run_basic_test_with_temp_config(config, conf.gan_examples_path("pix2pix_tf_keras"), 1)
@pytest.mark.nightly
def test_detr_coco_pytorch_const() -> None:
config = conf.load_config(conf.cv_examples_path("detr_coco_pytorch/const_fake.yaml"))
config = conf.set_max_length(config, {"batches": 200})
exp.run_basic_test_with_temp_config(config, conf.cv_examples_path("detr_coco_pytorch"), 1)
@pytest.mark.nightly
def test_efficientdet_coco_pytorch_const() -> None:
config = conf.load_config(conf.cv_examples_path("efficientdet_pytorch/const_fake.yaml"))
config = conf.set_max_length(config, {"batches": 200})
exp.run_basic_test_with_temp_config(config, conf.cv_examples_path("efficientdet_pytorch"), 1)
@pytest.mark.nightly
def test_detectron2_coco_pytorch_const() -> None:
config = conf.load_config(conf.cv_examples_path("detectron2_coco_pytorch/const_fake.yaml"))
config = conf.set_max_length(config, {"batches": 200})
exp.run_basic_test_with_temp_config(config, conf.cv_examples_path("detectron2_coco_pytorch"), 1)
@pytest.mark.nightly
def test_deformabledetr_coco_pytorch_const() -> None:
config = conf.load_config(conf.cv_examples_path("deformabledetr_coco_pytorch/const_fake.yaml"))
config = conf.set_max_length(config, {"batches": 200})
exp.run_basic_test_with_temp_config(
config, conf.cv_examples_path("deformabledetr_coco_pytorch"), 1
)
@pytest.mark.nightly
def test_word_language_transformer_const() -> None:
config = conf.load_config(conf.nlp_examples_path("word_language_model/const.yaml"))
config = conf.set_max_length(config, {"batches": 200})
config = config.copy()
config["hyperparameters"]["model_cls"] = "Transformer"
exp.run_basic_test_with_temp_config(config, conf.nlp_examples_path("word_language_model"), 1)
@pytest.mark.nightly
def test_word_language_lstm_const() -> None:
config = conf.load_config(conf.nlp_examples_path("word_language_model/const.yaml"))
config = conf.set_max_length(config, {"batches": 200})
config = config.copy()
config["hyperparameters"]["model_cls"] = "LSTM"
config["hyperparameters"]["tied"] = False
exp.run_basic_test_with_temp_config(config, conf.nlp_examples_path("word_language_model"), 1)
@pytest.mark.nightly
def test_protein_pytorch_geometric() -> None:
config = conf.load_config(conf.graphs_examples_path("proteins_pytorch_geometric/const.yaml"))
exp.run_basic_test_with_temp_config(
config, conf.graphs_examples_path("proteins_pytorch_geometric"), 1
)
| 37.618182
| 100
| 0.769696
| 595
| 4,138
| 4.952941
| 0.115966
| 0.162878
| 0.069223
| 0.081439
| 0.90397
| 0.888361
| 0.865287
| 0.808619
| 0.65151
| 0.638276
| 0
| 0.014367
| 0.108507
| 4,138
| 109
| 101
| 37.963303
| 0.784494
| 0
| 0
| 0.430556
| 0
| 0
| 0.192122
| 0.11479
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.041667
| 0
| 0.208333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7cd1a1442b81b6fae7804af3a5638504c2dc2126
| 71
|
py
|
Python
|
src/evaluator/__init__.py
|
natgen-team/NatGen
|
b1a7070caf3ff45cf2be1fc2196198e4e23cb30d
|
[
"MIT"
] | null | null | null |
src/evaluator/__init__.py
|
natgen-team/NatGen
|
b1a7070caf3ff45cf2be1fc2196198e4e23cb30d
|
[
"MIT"
] | null | null | null |
src/evaluator/__init__.py
|
natgen-team/NatGen
|
b1a7070caf3ff45cf2be1fc2196198e4e23cb30d
|
[
"MIT"
] | null | null | null |
from .CodeBLEU import *
from .bleu import *
from .smooth_bleu import *
| 17.75
| 26
| 0.746479
| 10
| 71
| 5.2
| 0.5
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169014
| 71
| 3
| 27
| 23.666667
| 0.881356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
7cfda1ef20a769fe9c7708be023f89901c031501
| 118
|
py
|
Python
|
decision_tree/__init__.py
|
metjush/decision_tree
|
561299819068b393f599f9f4c4c86f0ffb8c6d98
|
[
"MIT"
] | 5
|
2015-11-09T16:07:04.000Z
|
2021-04-08T06:47:14.000Z
|
decision_tree/__init__.py
|
metjush/decision_tree
|
561299819068b393f599f9f4c4c86f0ffb8c6d98
|
[
"MIT"
] | null | null | null |
decision_tree/__init__.py
|
metjush/decision_tree
|
561299819068b393f599f9f4c4c86f0ffb8c6d98
|
[
"MIT"
] | 3
|
2016-06-20T22:52:45.000Z
|
2021-04-08T06:47:43.000Z
|
from ClassTree import ClassificationTree
from ClassForest import RandomForest
from ClassTreeBagging import TreeBagger
| 29.5
| 40
| 0.898305
| 12
| 118
| 8.833333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 118
| 3
| 41
| 39.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6b0b68b9cd08d080cce53ec9456ad346967c77ba
| 113
|
py
|
Python
|
corecat/models/__init__.py
|
DanceCats/dancecat-core
|
877c475413237205526cca02372f378b6f39dbb3
|
[
"MIT"
] | 1
|
2017-03-25T14:30:30.000Z
|
2017-03-25T14:30:30.000Z
|
corecat/models/__init__.py
|
DanceCats/dancecat-core
|
877c475413237205526cca02372f378b6f39dbb3
|
[
"MIT"
] | 3
|
2017-03-23T11:05:02.000Z
|
2017-04-03T08:38:40.000Z
|
corecat/models/__init__.py
|
DanceCats/dancecat-core
|
877c475413237205526cca02372f378b6f39dbb3
|
[
"MIT"
] | 1
|
2017-03-18T07:21:59.000Z
|
2017-03-18T07:21:59.000Z
|
"""Initial file for corecat.models packages."""
from ._sqlalchemy import Base, ProxyDictMixin, CoreCatBaseMixin
| 28.25
| 63
| 0.79646
| 12
| 113
| 7.416667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106195
| 113
| 3
| 64
| 37.666667
| 0.881188
| 0.362832
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
86427665958f0b8e9d275a1ae02ae6fc4f58e20d
| 67
|
py
|
Python
|
pylookup/__init__.py
|
zachbateman/pylookup
|
77e4837a32464570c2a771e860d5d1175276df4d
|
[
"MIT"
] | 1
|
2021-11-27T20:18:08.000Z
|
2021-11-27T20:18:08.000Z
|
pylookup/__init__.py
|
zachbateman/pylookup
|
77e4837a32464570c2a771e860d5d1175276df4d
|
[
"MIT"
] | null | null | null |
pylookup/__init__.py
|
zachbateman/pylookup
|
77e4837a32464570c2a771e860d5d1175276df4d
|
[
"MIT"
] | null | null | null |
from .pylookup import pylookup, file_lookup
from . import __main__
| 22.333333
| 43
| 0.820896
| 9
| 67
| 5.555556
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134328
| 67
| 2
| 44
| 33.5
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
864934997ca0565990bf4242b7ceb86191499ab5
| 105
|
py
|
Python
|
python/testData/inspections/ChainedComparison4.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/inspections/ChainedComparison4.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/inspections/ChainedComparison4.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
a = <weak_warning descr="Simplify chained comparison">0 <= <caret>x < top and 0 >= y > top</weak_warning>
| 105
| 105
| 0.695238
| 17
| 105
| 4.176471
| 0.764706
| 0.309859
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.142857
| 105
| 1
| 105
| 105
| 0.766667
| 0
| 0
| 0
| 0
| 0
| 0.254717
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
86498d1a7849b73f2e21af2d7f3f9a86263ad01a
| 112,272
|
py
|
Python
|
manila/tests/share/drivers/test_service_instance.py
|
deiter/manila
|
ba94d20e823d2edad7e9bd01546cf1642b17d212
|
[
"Apache-2.0"
] | 1
|
2019-05-06T10:33:38.000Z
|
2019-05-06T10:33:38.000Z
|
manila/tests/share/drivers/test_service_instance.py
|
deiter/manila
|
ba94d20e823d2edad7e9bd01546cf1642b17d212
|
[
"Apache-2.0"
] | 4
|
2019-05-06T11:45:17.000Z
|
2019-05-09T14:23:28.000Z
|
manila/tests/share/drivers/test_service_instance.py
|
deiter/manila
|
ba94d20e823d2edad7e9bd01546cf1642b17d212
|
[
"Apache-2.0"
] | 3
|
2019-05-03T12:32:47.000Z
|
2021-01-30T20:26:19.000Z
|
# Copyright (c) 2014 NetApp, Inc.
# Copyright (c) 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for the instance module."""
import os
import time
import ddt
import mock
import netaddr
from oslo_config import cfg
from oslo_utils import importutils
import six
from manila import exception
from manila.share import configuration
from manila.share import driver # noqa
from manila.share.drivers import service_instance
from manila import test
from manila.tests import fake_compute
from manila.tests import fake_network
from manila.tests import utils as test_utils
CONF = cfg.CONF
def fake_get_config_option(key):
if key == 'driver_handles_share_servers':
return True
elif key == 'service_instance_password':
return None
elif key == 'service_instance_user':
return 'fake_user'
elif key == 'service_network_name':
return 'fake_service_network_name'
elif key == 'service_instance_flavor_id':
return 100
elif key == 'service_instance_name_template':
return 'fake_manila_service_instance_%s'
elif key == 'service_image_name':
return 'fake_service_image_name'
elif key == 'manila_service_keypair_name':
return 'fake_manila_service_keypair_name'
elif key == 'path_to_private_key':
return 'fake_path_to_private_key'
elif key == 'path_to_public_key':
return 'fake_path_to_public_key'
elif key == 'max_time_to_build_instance':
return 500
elif key == 'connect_share_server_to_tenant_network':
return False
elif key == 'service_network_cidr':
return '99.254.0.0/24'
elif key == 'service_network_division_mask':
return 27
elif key == 'service_instance_network_helper_type':
return service_instance.NEUTRON_NAME
elif key == 'service_network_name':
return 'fake_service_network_name'
elif key == 'interface_driver':
return 'i.am.fake.VifDriver'
elif key == 'admin_network_id':
return None
elif key == 'admin_subnet_id':
return None
else:
return mock.Mock()
class FakeServiceInstance(object):
def __init__(self, driver_config=None):
super(FakeServiceInstance, self).__init__()
self.compute_api = service_instance.compute.API()
self.admin_context = service_instance.context.get_admin_context()
self.driver_config = driver_config
def get_config_option(self, key):
return fake_get_config_option(key)
class FakeNetworkHelper(service_instance.BaseNetworkhelper):
@property
def NAME(self):
return self.get_config_option("service_instance_network_helper_type")
def __init__(self, service_instance_manager):
self.get_config_option = service_instance_manager.get_config_option
def get_network_name(self, network_info):
"""Return name of network."""
return 'fake_network_name'
def setup_connectivity_with_service_instances(self):
"""Nothing to do in fake network helper."""
def setup_network(self, network_info):
"""Combine fake network data."""
return dict()
def teardown_network(self, server_details):
"""Nothing to do in fake network helper."""
@ddt.ddt
class ServiceInstanceManagerTestCase(test.TestCase):
"""Test suite for service instance manager."""
def setUp(self):
super(ServiceInstanceManagerTestCase, self).setUp()
self.instance_id = 'fake_instance_id'
self.config = configuration.Configuration(None)
self.config.safe_get = mock.Mock(side_effect=fake_get_config_option)
self.mock_object(service_instance.compute, 'API', fake_compute.API)
self.mock_object(
service_instance.os.path, 'exists', mock.Mock(return_value=True))
self.mock_object(service_instance, 'NeutronNetworkHelper',
mock.Mock(side_effect=FakeNetworkHelper))
self.mock_object(service_instance, 'NovaNetworkHelper',
mock.Mock(side_effect=FakeNetworkHelper))
self._manager = service_instance.ServiceInstanceManager(self.config)
self._manager._execute = mock.Mock(return_value=('', ''))
self.mock_object(time, 'sleep')
def test_get_config_option_from_driver_config(self):
username1 = 'fake_username_1_%s' % self.id()
username2 = 'fake_username_2_%s' % self.id()
config_data = dict(
DEFAULT=dict(service_instance_user=username1),
CUSTOM=dict(service_instance_user=username2))
with test_utils.create_temp_config_with_opts(config_data):
self.config = configuration.Configuration(
service_instance.common_opts, config_group='CUSTOM')
self._manager = service_instance.ServiceInstanceManager(
self.config)
result = self._manager.get_config_option('service_instance_user')
self.assertEqual(username2, result)
def test_get_config_option_from_common_config(self):
username = 'fake_username_%s' % self.id()
config_data = dict(DEFAULT=dict(service_instance_user=username))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
result = self._manager.get_config_option('service_instance_user')
self.assertEqual(username, result)
def test_get_nova_network_helper(self):
# Mock it again, because one of these was called in setUp method.
self.mock_object(service_instance, 'NeutronNetworkHelper')
self.mock_object(service_instance, 'NovaNetworkHelper')
config_data = dict(DEFAULT=dict(
service_instance_user='fake_username',
driver_handles_share_servers=True,
service_instance_network_helper_type=service_instance.NOVA_NAME))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
self._manager.network_helper
service_instance.NovaNetworkHelper.assert_called_once_with(
self._manager)
self.assertFalse(service_instance.NeutronNetworkHelper.called)
def test_get_neutron_network_helper(self):
# Mock it again, because one of these was called in setUp method.
self.mock_object(service_instance, 'NeutronNetworkHelper')
self.mock_object(service_instance, 'NovaNetworkHelper')
config_data = dict(DEFAULT=dict(
service_instance_user='fake_username',
driver_handles_share_servers=True,
service_instance_network_helper_type=service_instance.NEUTRON_NAME)
)
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
self._manager.network_helper
service_instance.NeutronNetworkHelper.assert_called_once_with(
self._manager)
self.assertFalse(service_instance.NovaNetworkHelper.called)
@ddt.data(
None, '', 'fake', service_instance.NOVA_NAME + '_as_prefix',
service_instance.NEUTRON_NAME + '_as_prefix',
'as_suffix_' + service_instance.NOVA_NAME,
'as_suffix_' + service_instance.NEUTRON_NAME)
def test_get_fake_network_helper(self, value):
# Mock it again, because one of these was called in setUp method.
self.mock_object(service_instance, 'NeutronNetworkHelper')
self.mock_object(service_instance, 'NovaNetworkHelper')
config_data = dict(DEFAULT=dict(
service_instance_user='fake_username',
driver_handles_share_servers=True,
service_instance_network_helper_type=value))
with test_utils.create_temp_config_with_opts(config_data):
manager = service_instance.ServiceInstanceManager()
self.assertRaises(exception.ManilaException,
lambda: manager.network_helper)
self.assertFalse(service_instance.NeutronNetworkHelper.called)
self.assertFalse(service_instance.NovaNetworkHelper.called)
def test_init_with_driver_config_and_handling_of_share_servers(self):
self.mock_object(service_instance, 'NeutronNetworkHelper')
self.mock_object(service_instance, 'NovaNetworkHelper')
config_data = dict(CUSTOM=dict(
driver_handles_share_servers=True,
service_instance_user='fake_user',
service_instance_network_helper_type=service_instance.NOVA_NAME))
opts = service_instance.common_opts + driver.share_opts
with test_utils.create_temp_config_with_opts(config_data):
self.config = configuration.Configuration(opts, 'CUSTOM')
self._manager = service_instance.ServiceInstanceManager(
self.config)
self.assertEqual(
True,
self._manager.get_config_option("driver_handles_share_servers"))
self.assertIsNotNone(self._manager.driver_config)
self.assertTrue(hasattr(self._manager, 'network_helper'))
self.assertTrue(service_instance.NovaNetworkHelper.called)
self.assertFalse(service_instance.NeutronNetworkHelper.called)
def test_init_with_driver_config_and_wo_handling_of_share_servers(self):
self.mock_object(service_instance, 'NeutronNetworkHelper')
self.mock_object(service_instance, 'NovaNetworkHelper')
config_data = dict(CUSTOM=dict(
driver_handles_share_servers=False,
service_instance_user='fake_user'))
opts = service_instance.common_opts + driver.share_opts
with test_utils.create_temp_config_with_opts(config_data):
self.config = configuration.Configuration(opts, 'CUSTOM')
self._manager = service_instance.ServiceInstanceManager(
self.config)
self.assertIsNotNone(self._manager.driver_config)
self.assertFalse(hasattr(self._manager, 'network_helper'))
self.assertFalse(service_instance.NovaNetworkHelper.called)
self.assertFalse(service_instance.NeutronNetworkHelper.called)
def test_init_with_common_config_and_handling_of_share_servers(self):
self.mock_object(service_instance, 'NeutronNetworkHelper')
self.mock_object(service_instance, 'NovaNetworkHelper')
config_data = dict(DEFAULT=dict(
service_instance_user='fake_username',
driver_handles_share_servers=True,
service_instance_network_helper_type=service_instance.NOVA_NAME))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
self.assertEqual(
True,
self._manager.get_config_option("driver_handles_share_servers"))
self.assertIsNone(self._manager.driver_config)
self.assertTrue(hasattr(self._manager, 'network_helper'))
self.assertTrue(service_instance.NovaNetworkHelper.called)
self.assertFalse(service_instance.NeutronNetworkHelper.called)
def test_init_with_common_config_and_wo_handling_of_share_servers(self):
self.mock_object(service_instance, 'NeutronNetworkHelper')
self.mock_object(service_instance, 'NovaNetworkHelper')
config_data = dict(DEFAULT=dict(
service_instance_user='fake_username',
driver_handles_share_servers=False))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
self.assertEqual(
False,
self._manager.get_config_option("driver_handles_share_servers"))
self.assertIsNone(self._manager.driver_config)
self.assertFalse(hasattr(self._manager, 'network_helper'))
self.assertFalse(service_instance.NovaNetworkHelper.called)
self.assertFalse(service_instance.NeutronNetworkHelper.called)
def test_no_service_user_defined(self):
group_name = 'GROUP_%s' % self.id()
config_data = {group_name: dict()}
with test_utils.create_temp_config_with_opts(config_data):
config = configuration.Configuration(
service_instance.common_opts, config_group=group_name)
self.assertRaises(
exception.ServiceInstanceException,
service_instance.ServiceInstanceManager, config)
def test_get_service_instance_name_using_driver_config(self):
fake_server_id = 'fake_share_server_id_%s' % self.id()
self.mock_object(service_instance, 'NeutronNetworkHelper')
self.mock_object(service_instance, 'NovaNetworkHelper')
config_data = dict(CUSTOM=dict(
driver_handles_share_servers=True,
service_instance_user='fake_user',
service_instance_network_helper_type=service_instance.NOVA_NAME))
opts = service_instance.common_opts + driver.share_opts
with test_utils.create_temp_config_with_opts(config_data):
self.config = configuration.Configuration(opts, 'CUSTOM')
self._manager = service_instance.ServiceInstanceManager(
self.config)
result = self._manager._get_service_instance_name(fake_server_id)
self.assertIsNotNone(self._manager.driver_config)
self.assertEqual(
self._manager.get_config_option(
"service_instance_name_template") % "%s_%s" % (
self._manager.driver_config.config_group, fake_server_id),
result)
self.assertEqual(
True,
self._manager.get_config_option("driver_handles_share_servers"))
self.assertTrue(hasattr(self._manager, 'network_helper'))
self.assertTrue(service_instance.NovaNetworkHelper.called)
self.assertFalse(service_instance.NeutronNetworkHelper.called)
def test_get_service_instance_name_using_default_config(self):
fake_server_id = 'fake_share_server_id_%s' % self.id()
config_data = dict(CUSTOM=dict(
service_instance_user='fake_user',
service_instance_network_helper_type=service_instance.NOVA_NAME))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
result = self._manager._get_service_instance_name(fake_server_id)
self.assertIsNone(self._manager.driver_config)
self.assertEqual(
self._manager.get_config_option(
"service_instance_name_template") % fake_server_id, result)
def test__check_server_availability_available_from_start(self):
fake_server = dict(id='fake_server', ip='127.0.0.1')
self.mock_object(service_instance.socket.socket, 'connect')
self.mock_object(service_instance.time, 'sleep')
self.mock_object(service_instance.time, 'time',
mock.Mock(return_value=0))
result = self._manager._check_server_availability(fake_server)
self.assertTrue(result)
service_instance.socket.socket.connect.assert_called_once_with(
(fake_server['ip'], 22))
service_instance.time.time.assert_has_calls([
mock.call(), mock.call()])
service_instance.time.time.assert_has_calls([])
@ddt.data(True, False)
def test__check_server_availability_with_recall(self, is_ok):
fake_server = dict(id='fake_server', ip='fake_ip_address')
self.fake_time = 0
def fake_connect(addr):
if not(is_ok and self.fake_time > 1):
raise service_instance.socket.error
def fake_time():
return self.fake_time
def fake_sleep(time):
self.fake_time += 5
self.mock_object(service_instance.time, 'sleep',
mock.Mock(side_effect=fake_sleep))
self.mock_object(service_instance.socket.socket, 'connect',
mock.Mock(side_effect=fake_connect))
self.mock_object(service_instance.time, 'time',
mock.Mock(side_effect=fake_time))
self._manager.max_time_to_build_instance = 6
result = self._manager._check_server_availability(fake_server)
if is_ok:
self.assertTrue(result)
else:
self.assertFalse(result)
service_instance.socket.socket.connect.assert_has_calls([
mock.call((fake_server['ip'], 22)),
mock.call((fake_server['ip'], 22))])
service_instance.time.time.assert_has_calls([
mock.call(), mock.call(), mock.call()])
service_instance.time.time.assert_has_calls([mock.call()])
def test_get_server_ip_found_in_networks_section(self):
ip = '10.0.0.1'
net_name = self._manager.get_config_option('service_network_name')
fake_server = dict(networks={net_name: [ip]})
result = self._manager._get_server_ip(fake_server, net_name)
self.assertEqual(ip, result)
def test_get_server_ip_found_in_addresses_section(self):
ip = '10.0.0.1'
net_name = self._manager.get_config_option('service_network_name')
fake_server = dict(addresses={net_name: [dict(addr=ip, version=4)]})
result = self._manager._get_server_ip(fake_server, net_name)
self.assertEqual(ip, result)
    @ddt.data(
        {},
        {'networks': {fake_get_config_option('service_network_name'): []}},
        {'addresses': {fake_get_config_option('service_network_name'): []}})
    def test_get_server_ip_not_found(self, data):
        """ManilaException is raised when no IP exists for the service net."""
        self.assertRaises(
            exception.ManilaException,
            self._manager._get_server_ip, data,
            fake_get_config_option('service_network_name'))
    def test_security_group_name_not_specified(self):
        """No security group is created when the option is not configured."""
        self.mock_object(self._manager, 'get_config_option',
                         mock.Mock(return_value=None))
        result = self._manager._get_or_create_security_group(
            self._manager.admin_context)
        self.assertIsNone(result)
        self._manager.get_config_option.assert_called_once_with(
            'service_instance_security_group')
    def test_security_group_name_from_config_and_sg_exist(self):
        """An existing group matching the configured name is returned as-is."""
        fake_secgroup = fake_compute.FakeSecurityGroup(name="fake_sg_name")
        self.mock_object(self._manager, 'get_config_option',
                         mock.Mock(return_value="fake_sg_name"))
        self.mock_object(self._manager.compute_api, 'security_group_list',
                         mock.Mock(return_value=[fake_secgroup, ]))
        result = self._manager._get_or_create_security_group(
            self._manager.admin_context)
        self.assertEqual(fake_secgroup, result)
        self._manager.get_config_option.assert_has_calls([
            mock.call('service_instance_security_group'),
        ])
        self._manager.compute_api.security_group_list.assert_called_once_with(
            self._manager.admin_context)
    def test_security_group_creation_with_name_from_config(self):
        """With name=None, the group is created using the configured name."""
        name = "fake_sg_name"
        desc = "fake_sg_description"
        fake_secgroup = fake_compute.FakeSecurityGroup(name=name,
                                                       description=desc)
        self.mock_object(self._manager, 'get_config_option',
                         mock.Mock(return_value=name))
        self.mock_object(self._manager.compute_api, 'security_group_list',
                         mock.Mock(return_value=[]))
        self.mock_object(self._manager.compute_api, 'security_group_create',
                         mock.Mock(return_value=fake_secgroup))
        self.mock_object(self._manager.compute_api,
                         'security_group_rule_create')
        # name=None forces the fallback to the configured group name.
        result = self._manager._get_or_create_security_group(
            context=self._manager.admin_context,
            name=None,
            description=desc,
        )
        self.assertEqual(fake_secgroup, result)
        self._manager.compute_api.security_group_list.assert_called_once_with(
            self._manager.admin_context)
        self._manager.compute_api.security_group_create.\
            assert_called_once_with(self._manager.admin_context, name, desc)
        self._manager.get_config_option.assert_has_calls([
            mock.call('service_instance_security_group'),
        ])
    def test_security_group_creation_with_provided_name(self):
        """A missing group is created using the caller-provided name."""
        name = "fake_sg_name"
        fake_secgroup = fake_compute.FakeSecurityGroup(name=name)
        self.mock_object(self._manager.compute_api, 'security_group_list',
                         mock.Mock(return_value=[]))
        self.mock_object(self._manager.compute_api, 'security_group_create',
                         mock.Mock(return_value=fake_secgroup))
        self.mock_object(self._manager.compute_api,
                         'security_group_rule_create')
        result = self._manager._get_or_create_security_group(
            context=self._manager.admin_context, name=name)
        self._manager.compute_api.security_group_list.assert_called_once_with(
            self._manager.admin_context)
        self._manager.compute_api.security_group_create.\
            assert_called_once_with(
                self._manager.admin_context, name, mock.ANY)
        self.assertEqual(fake_secgroup, result)
    def test_security_group_two_sg_in_list(self):
        """Two groups sharing one name raise ServiceInstanceException."""
        name = "fake_name"
        fake_secgroup1 = fake_compute.FakeSecurityGroup(name=name)
        fake_secgroup2 = fake_compute.FakeSecurityGroup(name=name)
        self.mock_object(self._manager.compute_api, 'security_group_list',
                         mock.Mock(return_value=[fake_secgroup1,
                                                 fake_secgroup2]))
        self.assertRaises(exception.ServiceInstanceException,
                          self._manager._get_or_create_security_group,
                          self._manager.admin_context,
                          name)
        self._manager.compute_api.security_group_list.assert_called_once_with(
            self._manager.admin_context)
    @ddt.data(
        dict(),
        dict(service_port_id='fake_service_port_id'),
        dict(public_port_id='fake_public_port_id'),
        dict(service_port_id='fake_service_port_id',
             public_port_id='fake_public_port_id'),
    )
    def test_set_up_service_instance(self, update_data):
        """Returned details drop 'pk_path' and rename 'id' to 'instance_id'.

        Parameterized over optional port ids that should be passed through
        unchanged into the returned server details.
        """
        fake_network_info = {'foo': 'bar', 'server_id': 'fake_server_id'}
        fake_server = {
            'id': 'fake', 'ip': '1.2.3.4', 'public_address': '1.2.3.4',
            'pk_path': None, 'subnet_id': 'fake-subnet-id',
            'router_id': 'fake-router-id',
            'username': self._manager.get_config_option(
                'service_instance_user'),
            'admin_ip': 'admin_ip'}
        fake_server.update(update_data)
        expected_details = fake_server.copy()
        expected_details.pop('pk_path')
        expected_details['instance_id'] = expected_details.pop('id')
        self.mock_object(self._manager, '_create_service_instance',
                         mock.Mock(return_value=fake_server))
        self.mock_object(self._manager, '_check_server_availability')
        result = self._manager.set_up_service_instance(
            self._manager.admin_context, fake_network_info)
        self._manager._create_service_instance.assert_called_once_with(
            self._manager.admin_context,
            fake_network_info['server_id'], fake_network_info)
        self._manager._check_server_availability.assert_called_once_with(
            expected_details)
        self.assertEqual(expected_details, result)
    def test_set_up_service_instance_not_available(self):
        """An unreachable instance raises with server details attached."""
        fake_network_info = {'foo': 'bar', 'server_id': 'fake_server_id'}
        fake_server = {
            'id': 'fake', 'ip': '1.2.3.4', 'public_address': '1.2.3.4',
            'pk_path': None, 'subnet_id': 'fake-subnet-id',
            'router_id': 'fake-router-id',
            'username': self._manager.get_config_option(
                'service_instance_user'),
            'admin_ip': 'admin_ip'}
        expected_details = fake_server.copy()
        expected_details.pop('pk_path')
        expected_details['instance_id'] = expected_details.pop('id')
        self.mock_object(self._manager, '_create_service_instance',
                         mock.Mock(return_value=fake_server))
        self.mock_object(self._manager, '_check_server_availability',
                         mock.Mock(return_value=False))
        # assertRaises returns the raised exception for further inspection.
        result = self.assertRaises(
            exception.ServiceInstanceException,
            self._manager.set_up_service_instance,
            self._manager.admin_context, fake_network_info)
        self.assertTrue(hasattr(result, 'detail_data'))
        self.assertEqual(
            {'server_details': expected_details}, result.detail_data)
        self._manager._create_service_instance.assert_called_once_with(
            self._manager.admin_context,
            fake_network_info['server_id'], fake_network_info)
        self._manager._check_server_availability.assert_called_once_with(
            expected_details)
    def test_ensure_server(self):
        """Active, reachable instance makes ensure_service_instance True."""
        server_details = {'instance_id': 'fake_inst_id', 'ip': '1.2.3.4'}
        fake_server = fake_compute.FakeServer()
        self.mock_object(self._manager, '_check_server_availability',
                         mock.Mock(return_value=True))
        self.mock_object(self._manager.compute_api, 'server_get',
                         mock.Mock(return_value=fake_server))
        result = self._manager.ensure_service_instance(
            self._manager.admin_context, server_details)
        self._manager.compute_api.server_get.assert_called_once_with(
            self._manager.admin_context, server_details['instance_id'])
        self._manager._check_server_availability.assert_called_once_with(
            server_details)
        self.assertTrue(result)
    def test_ensure_server_not_exists(self):
        """A missing instance yields False without an availability check."""
        server_details = {'instance_id': 'fake_inst_id', 'ip': '1.2.3.4'}
        self.mock_object(self._manager, '_check_server_availability',
                         mock.Mock(return_value=True))
        self.mock_object(self._manager.compute_api, 'server_get',
                         mock.Mock(side_effect=exception.InstanceNotFound(
                             instance_id=server_details['instance_id'])))
        result = self._manager.ensure_service_instance(
            self._manager.admin_context, server_details)
        self._manager.compute_api.server_get.assert_called_once_with(
            self._manager.admin_context, server_details['instance_id'])
        self.assertFalse(self._manager._check_server_availability.called)
        self.assertFalse(result)
    def test_ensure_server_exception(self):
        """Unexpected compute errors propagate from ensure_service_instance."""
        server_details = {'instance_id': 'fake_inst_id', 'ip': '1.2.3.4'}
        self.mock_object(self._manager, '_check_server_availability',
                         mock.Mock(return_value=True))
        self.mock_object(self._manager.compute_api, 'server_get',
                         mock.Mock(side_effect=exception.ManilaException))
        self.assertRaises(exception.ManilaException,
                          self._manager.ensure_service_instance,
                          self._manager.admin_context,
                          server_details)
        self._manager.compute_api.server_get.assert_called_once_with(
            self._manager.admin_context, server_details['instance_id'])
        self.assertFalse(self._manager._check_server_availability.called)
    def test_ensure_server_non_active(self):
        """A non-ACTIVE instance yields False without an availability check."""
        server_details = {'instance_id': 'fake_inst_id', 'ip': '1.2.3.4'}
        fake_server = fake_compute.FakeServer(status='ERROR')
        self.mock_object(self._manager.compute_api, 'server_get',
                         mock.Mock(return_value=fake_server))
        self.mock_object(self._manager, '_check_server_availability',
                         mock.Mock(return_value=True))
        result = self._manager.ensure_service_instance(
            self._manager.admin_context, server_details)
        self.assertFalse(self._manager._check_server_availability.called)
        self.assertFalse(result)
def test_ensure_server_no_instance_id(self):
# Tests that we avoid a KeyError if the share details don't have an
# instance_id key set (so we can't find the share instance).
self.assertFalse(self._manager.ensure_service_instance(
self._manager.admin_context, {'ip': '1.2.3.4'}))
    def test_get_key_create_new(self):
        """With no existing keypair, the public key is imported anew."""
        keypair_name = self._manager.get_config_option(
            'manila_service_keypair_name')
        fake_keypair = fake_compute.FakeKeypair(name=keypair_name)
        self.mock_object(self._manager.compute_api, 'keypair_list',
                         mock.Mock(return_value=[]))
        self.mock_object(self._manager.compute_api, 'keypair_import',
                         mock.Mock(return_value=fake_keypair))
        result = self._manager._get_key(self._manager.admin_context)
        self.assertEqual(
            (fake_keypair.name,
             os.path.expanduser(self._manager.get_config_option(
                 'path_to_private_key'))),
            result)
        self._manager.compute_api.keypair_list.assert_called_once_with(
            self._manager.admin_context)
        self._manager.compute_api.keypair_import.assert_called_once_with(
            self._manager.admin_context, keypair_name, '')
    def test_get_key_exists(self):
        """A keypair whose public key already matches is reused, not imported."""
        fake_keypair = fake_compute.FakeKeypair(
            name=self._manager.get_config_option(
                'manila_service_keypair_name'),
            public_key='fake_public_key')
        self.mock_object(self._manager.compute_api, 'keypair_list',
                         mock.Mock(return_value=[fake_keypair]))
        self.mock_object(self._manager.compute_api, 'keypair_import',
                         mock.Mock(return_value=fake_keypair))
        self.mock_object(self._manager, '_execute',
                         mock.Mock(return_value=('fake_public_key', '')))
        result = self._manager._get_key(self._manager.admin_context)
        self._manager.compute_api.keypair_list.assert_called_once_with(
            self._manager.admin_context)
        self.assertFalse(self._manager.compute_api.keypair_import.called)
        self.assertEqual(
            (fake_keypair.name,
             os.path.expanduser(self._manager.get_config_option(
                 'path_to_private_key'))),
            result)
    def test_get_key_exists_recreate(self):
        """A keypair with a stale public key is deleted and re-imported."""
        fake_keypair = fake_compute.FakeKeypair(
            name=self._manager.get_config_option(
                'manila_service_keypair_name'),
            public_key='fake_public_key1')
        self.mock_object(self._manager.compute_api, 'keypair_list',
                         mock.Mock(return_value=[fake_keypair]))
        self.mock_object(self._manager.compute_api, 'keypair_import',
                         mock.Mock(return_value=fake_keypair))
        self.mock_object(self._manager.compute_api, 'keypair_delete')
        self.mock_object(self._manager, '_execute',
                         mock.Mock(return_value=('fake_public_key2', '')))
        result = self._manager._get_key(self._manager.admin_context)
        self._manager.compute_api.keypair_list.assert_called_once_with(
            self._manager.admin_context)
        self._manager.compute_api.keypair_delete.assert_called_once_with(
            self._manager.admin_context, fake_keypair.id)
        self._manager.compute_api.keypair_import.assert_called_once_with(
            self._manager.admin_context, fake_keypair.name, 'fake_public_key2')
        self.assertEqual(
            (fake_keypair.name,
             os.path.expanduser(self._manager.get_config_option(
                 'path_to_private_key'))),
            result)
    def test_get_key_more_than_one_exist(self):
        """Duplicate keypairs with one name raise ServiceInstanceException."""
        fake_keypair = fake_compute.FakeKeypair(
            name=self._manager.get_config_option(
                'manila_service_keypair_name'),
            public_key='fake_public_key1')
        self.mock_object(self._manager.compute_api, 'keypair_list',
                         mock.Mock(return_value=[fake_keypair, fake_keypair]))
        self.assertRaises(
            exception.ServiceInstanceException,
            self._manager._get_key, self._manager.admin_context)
        self._manager.compute_api.keypair_list.assert_called_once_with(
            self._manager.admin_context)
def test_get_key_keypath_to_public_not_set(self):
self._manager.path_to_public_key = None
result = self._manager._get_key(self._manager.admin_context)
self.assertEqual((None, None), result)
def test_get_key_keypath_to_private_not_set(self):
self._manager.path_to_private_key = None
result = self._manager._get_key(self._manager.admin_context)
self.assertEqual((None, None), result)
def test_get_key_incorrect_keypath_to_public(self):
def exists_side_effect(path):
return False if path == 'fake_path' else True
self._manager.path_to_public_key = 'fake_path'
os_path_exists_mock = mock.Mock(side_effect=exists_side_effect)
with mock.patch.object(os.path, 'exists', os_path_exists_mock):
with mock.patch.object(os.path, 'expanduser',
mock.Mock(side_effect=lambda value: value)):
result = self._manager._get_key(self._manager.admin_context)
self.assertEqual((None, None), result)
def test_get_key_incorrect_keypath_to_private(self):
def exists_side_effect(path):
return False if path == 'fake_path' else True
self._manager.path_to_private_key = 'fake_path'
os_path_exists_mock = mock.Mock(side_effect=exists_side_effect)
with mock.patch.object(os.path, 'exists', os_path_exists_mock):
with mock.patch.object(os.path, 'expanduser',
mock.Mock(side_effect=lambda value: value)):
result = self._manager._get_key(self._manager.admin_context)
self.assertEqual((None, None), result)
def test_get_service_image(self):
fake_image1 = fake_compute.FakeImage(
name=self._manager.get_config_option('service_image_name'))
fake_image2 = fake_compute.FakeImage(name='another-image')
self.mock_object(self._manager.compute_api, 'image_list',
mock.Mock(return_value=[fake_image1, fake_image2]))
result = self._manager._get_service_image(self._manager.admin_context)
self.assertEqual(fake_image1.id, result)
    def test_get_service_image_not_found(self):
        """A missing service image raises ServiceInstanceException."""
        self.mock_object(self._manager.compute_api, 'image_list',
                         mock.Mock(return_value=[]))
        self.assertRaises(
            exception.ServiceInstanceException,
            self._manager._get_service_image, self._manager.admin_context)
    def test_get_service_image_ambiguous(self):
        """Multiple images carrying the service name raise an exception."""
        fake_image = fake_compute.FakeImage(
            name=fake_get_config_option('service_image_name'))
        fake_images = [fake_image, fake_image]
        self.mock_object(self._manager.compute_api, 'image_list',
                         mock.Mock(return_value=fake_images))
        self.assertRaises(
            exception.ServiceInstanceException,
            self._manager._get_service_image, self._manager.admin_context)
    def test__delete_server_not_found(self):
        """Deleting an already-absent server skips server_delete entirely."""
        self.mock_object(self._manager.compute_api, 'server_delete')
        self.mock_object(
            self._manager.compute_api, 'server_get',
            mock.Mock(side_effect=exception.InstanceNotFound(
                instance_id=self.instance_id)))
        self._manager._delete_server(
            self._manager.admin_context, self.instance_id)
        self.assertFalse(self._manager.compute_api.server_delete.called)
        self._manager.compute_api.server_get.assert_called_once_with(
            self._manager.admin_context, self.instance_id)
    def test__delete_server(self):
        """_delete_server polls server_get until the instance disappears."""
        def fake_server_get(*args, **kwargs):
            # First call: mark the context and report the server as still
            # present (return None); second call: raise InstanceNotFound so
            # the deletion-wait loop terminates.
            ctx = args[0]
            if not hasattr(ctx, 'called'):
                ctx.called = True
                return
            else:
                raise exception.InstanceNotFound(instance_id=self.instance_id)
        self.mock_object(self._manager.compute_api, 'server_delete')
        self.mock_object(self._manager.compute_api, 'server_get',
                         mock.Mock(side_effect=fake_server_get))
        self._manager._delete_server(
            self._manager.admin_context, self.instance_id)
        self._manager.compute_api.server_delete.assert_called_once_with(
            self._manager.admin_context, self.instance_id)
        self._manager.compute_api.server_get.assert_has_calls([
            mock.call(self._manager.admin_context, self.instance_id),
            mock.call(self._manager.admin_context, self.instance_id)])
    def test__delete_server_found_always(self):
        """ServiceInstanceException is raised if the server never disappears.

        time/sleep are faked so the 2-second max_time_to_build_instance
        deadline elapses while server_get keeps reporting the instance.
        """
        self.fake_time = 0
        def fake_time():
            return self.fake_time
        def fake_sleep(time):
            self.fake_time += 1
        self.mock_object(self._manager.compute_api, 'server_delete')
        self.mock_object(self._manager.compute_api, 'server_get')
        self.mock_object(service_instance, 'time')
        self.mock_object(
            service_instance.time, 'time', mock.Mock(side_effect=fake_time))
        self.mock_object(
            service_instance.time, 'sleep', mock.Mock(side_effect=fake_sleep))
        self.mock_object(self._manager, 'max_time_to_build_instance', 2)
        self.assertRaises(
            exception.ServiceInstanceException, self._manager._delete_server,
            self._manager.admin_context, self.instance_id)
        self._manager.compute_api.server_delete.assert_called_once_with(
            self._manager.admin_context, self.instance_id)
        service_instance.time.sleep.assert_has_calls(
            [mock.call(mock.ANY) for i in range(2)])
        service_instance.time.time.assert_has_calls(
            [mock.call() for i in range(4)])
        self._manager.compute_api.server_get.assert_has_calls(
            [mock.call(self._manager.admin_context,
                       self.instance_id) for i in range(3)])
def test_delete_service_instance(self):
fake_server_details = dict(
router_id='foo', subnet_id='bar', instance_id='quuz')
self.mock_object(self._manager, '_delete_server')
self.mock_object(self._manager.network_helper, 'teardown_network')
self._manager.delete_service_instance(
self._manager.admin_context, fake_server_details)
self._manager._delete_server.assert_called_once_with(
self._manager.admin_context, fake_server_details['instance_id'])
self._manager.network_helper.teardown_network.assert_called_once_with(
fake_server_details)
    @ddt.data(
        *[{'s': s, 't': t, 'server': server}
          for s, t in (
              ('fake_net_s', 'fake_net_t'),
              ('fake_net_s', '12.34.56.78'),
              ('98.76.54.123', 'fake_net_t'),
              ('98.76.54.123', '12.34.56.78'))
          for server in (
              {'networks': {
                  'fake_net_s': ['foo', '98.76.54.123', 'bar'],
                  'fake_net_t': ['baar', '12.34.56.78', 'quuz']}},
              {'addresses': {
                  'fake_net_s': [
                      {'addr': 'fake1'},
                      {'addr': '98.76.54.123'},
                      {'addr': 'fake2'}],
                  'fake_net_t': [
                      {'addr': 'fake3'},
                      {'addr': '12.34.56.78'},
                      {'addr': 'fake4'}],
              }})])
    @ddt.unpack
    def test_get_common_server_valid_cases(self, s, t, server):
        """Each valid net-name/IP pair resolves against both server layouts."""
        self._get_common_server(s, t, server, True)
    @ddt.data(
        *[{'s': s, 't': t, 'server': server}
          for s, t in (
              ('fake_net_s', 'fake'),
              ('fake', 'fake_net_t'),
              ('fake', 'fake'),
              ('98.76.54.123', '12.12.12.1212'),
              ('12.12.12.1212', '12.34.56.78'),
              ('12.12.12.1212', '12.12.12.1212'))
          for server in (
              {'networks': {
                  'fake_net_s': ['foo', '98.76.54.123', 'bar'],
                  'fake_net_t': ['baar', '12.34.56.78', 'quuz']}},
              {'addresses': {
                  'fake_net_s': [
                      {'addr': 'fake1'},
                      {'addr': '98.76.54.123'},
                      {'addr': 'fake2'}],
                  'fake_net_t': [
                      {'addr': 'fake3'},
                      {'addr': '12.34.56.78'},
                      {'addr': 'fake4'}],
              }})])
    @ddt.unpack
    def test_get_common_server_invalid_cases(self, s, t, server):
        """Mismatched net-name/IP pairs make get_common_server raise."""
        self._get_common_server(s, t, server, False)
    def _get_common_server(self, s, t, server, is_valid=True):
        """Shared driver for the get_common_server test matrices.

        :param s: value returned for the 'service_net_name_or_ip' option
        :param t: value returned for the 'tenant_net_name_or_ip' option
        :param server: fake server dict ('networks' or 'addresses' layout)
        :param is_valid: whether address resolution is expected to succeed
        """
        fake_instance_id = 'fake_instance_id'
        fake_user = 'fake_user'
        fake_pass = 'fake_pass'
        fake_addr_s = '98.76.54.123'
        fake_addr_t = '12.34.56.78'
        fake_server = {'id': fake_instance_id}
        fake_server.update(server)
        expected = {
            'backend_details': {
                'username': fake_user,
                'password': fake_pass,
                'pk_path': self._manager.path_to_private_key,
                'ip': fake_addr_s,
                'public_address': fake_addr_t,
                'instance_id': fake_instance_id,
            }
        }
        # NOTE: intentionally shadows the module-level fake_get_config_option
        # helper; any unexpected option lookup fails the test loudly.
        def fake_get_config_option(attr):
            if attr == 'service_net_name_or_ip':
                return s
            elif attr == 'tenant_net_name_or_ip':
                return t
            elif attr == 'service_instance_name_or_id':
                return fake_instance_id
            elif attr == 'service_instance_user':
                return fake_user
            elif attr == 'service_instance_password':
                return fake_pass
            else:
                raise exception.ManilaException("Wrong test data provided.")
        self.mock_object(
            self._manager.compute_api, 'server_get_by_name_or_id',
            mock.Mock(return_value=fake_server))
        self.mock_object(
            self._manager, 'get_config_option',
            mock.Mock(side_effect=fake_get_config_option))
        if is_valid:
            actual = self._manager.get_common_server()
            self.assertEqual(expected, actual)
        else:
            self.assertRaises(
                exception.ManilaException,
                self._manager.get_common_server)
        self.assertTrue(
            self._manager.compute_api.server_get_by_name_or_id.called)
    @ddt.data(service_instance.NOVA_NAME, service_instance.NEUTRON_NAME)
    def test___create_service_instance_with_sg_success(self, helper_type):
        """Happy path of _create_service_instance for both network helpers.

        Verifies the returned server details and that the security group is
        attached by id (neutron) or by name (nova).
        """
        self.mock_object(service_instance, 'NeutronNetworkHelper',
                         mock.Mock(side_effect=FakeNetworkHelper))
        self.mock_object(service_instance, 'NovaNetworkHelper',
                         mock.Mock(side_effect=FakeNetworkHelper))
        config_data = dict(DEFAULT=dict(
            driver_handles_share_servers=True,
            service_instance_user='fake_user',
            service_instance_network_helper_type=helper_type))
        with test_utils.create_temp_config_with_opts(config_data):
            self._manager = service_instance.ServiceInstanceManager()
        server_create = dict(id='fakeid', status='CREATING', networks=dict())
        net_name = self._manager.get_config_option("service_network_name")
        sg = type('FakeSG', (object, ), dict(id='fakeid', name='fakename'))
        ip_address = 'fake_ip_address'
        service_image_id = 'fake_service_image_id'
        key_data = 'fake_key_name', 'fake_key_path'
        instance_name = 'fake_instance_name'
        network_info = dict()
        network_data = {'nics': ['fake_nic1', 'fake_nic2']}
        if helper_type == service_instance.NEUTRON_NAME:
            network_data['router'] = dict(id='fake_router_id')
        server_get = dict(
            id='fakeid', status='ACTIVE', networks={net_name: [ip_address]})
        if helper_type == service_instance.NEUTRON_NAME:
            network_data.update(dict(
                router_id='fake_router_id', subnet_id='fake_subnet_id',
                public_port=dict(id='fake_public_port',
                                 fixed_ips=[dict(ip_address=ip_address)]),
                service_port=dict(id='fake_service_port',
                                  fixed_ips=[{'ip_address': ip_address}]),
                admin_port={'id': 'fake_admin_port',
                            'fixed_ips': [{'ip_address': ip_address}]}))
        self.mock_object(service_instance.time, 'time',
                         mock.Mock(return_value=5))
        self.mock_object(self._manager.network_helper, 'setup_network',
                         mock.Mock(return_value=network_data))
        self.mock_object(self._manager.network_helper, 'get_network_name',
                         mock.Mock(return_value=net_name))
        self.mock_object(self._manager, '_get_service_image',
                         mock.Mock(return_value=service_image_id))
        self.mock_object(self._manager, '_get_key',
                         mock.Mock(return_value=key_data))
        self.mock_object(self._manager, '_get_or_create_security_group',
                         mock.Mock(return_value=sg))
        self.mock_object(self._manager.compute_api, 'server_create',
                         mock.Mock(return_value=server_create))
        self.mock_object(self._manager.compute_api, 'server_get',
                         mock.Mock(return_value=server_get))
        self.mock_object(self._manager.compute_api,
                         'add_security_group_to_server')
        expected = {
            'id': server_get['id'],
            'status': server_get['status'],
            'pk_path': key_data[1],
            'public_address': ip_address,
            'router_id': network_data.get('router_id'),
            'subnet_id': network_data.get('subnet_id'),
            'instance_id': server_get['id'],
            'ip': ip_address,
            'networks': server_get['networks']}
        if helper_type == service_instance.NEUTRON_NAME:
            expected['router_id'] = network_data['router']['id']
            expected['public_port_id'] = 'fake_public_port'
            expected['service_port_id'] = 'fake_service_port'
            expected['admin_port_id'] = 'fake_admin_port'
            expected['admin_ip'] = 'fake_ip_address'
        result = self._manager._create_service_instance(
            self._manager.admin_context, instance_name, network_info)
        self.assertEqual(expected, result)
        self.assertTrue(service_instance.time.time.called)
        self._manager.network_helper.setup_network.assert_called_once_with(
            network_info)
        self._manager._get_service_image.assert_called_once_with(
            self._manager.admin_context)
        self._manager._get_key.assert_called_once_with(
            self._manager.admin_context)
        self._manager._get_or_create_security_group.assert_called_once_with(
            self._manager.admin_context)
        self._manager.compute_api.server_create.assert_called_once_with(
            self._manager.admin_context, name=instance_name,
            image=service_image_id, flavor=100,
            key_name=key_data[0], nics=network_data['nics'],
            availability_zone=service_instance.CONF.storage_availability_zone)
        self._manager.compute_api.server_get.assert_called_once_with(
            self._manager.admin_context, server_create['id'])
        if helper_type == service_instance.NEUTRON_NAME:
            self._manager.compute_api.add_security_group_to_server.\
                assert_called_once_with(
                    self._manager.admin_context, server_get['id'], sg.id)
            # NOTE(review): assert_has_calls([]) always passes; this was
            # probably meant as assertFalse(get_network_name.called) —
            # confirm against the tested module before strengthening.
            self._manager.network_helper.get_network_name.assert_has_calls([])
        else:
            self._manager.compute_api.add_security_group_to_server.\
                assert_called_once_with(
                    self._manager.admin_context, server_get['id'], sg.name)
            self._manager.network_helper.get_network_name.\
                assert_called_once_with(network_info)
    def test___create_service_instance_neutron_no_admin_ip(self):
        """AdminIPNotFound is raised when the admin port has no fixed IPs."""
        self.mock_object(service_instance, 'NeutronNetworkHelper',
                         mock.Mock(side_effect=FakeNetworkHelper))
        config_data = {'DEFAULT': {
            'driver_handles_share_servers': True,
            'service_instance_user': 'fake_user',
            'service_instance_network_helper_type': (
                service_instance.NEUTRON_NAME)}}
        with test_utils.create_temp_config_with_opts(config_data):
            self._manager = service_instance.ServiceInstanceManager()
        server_create = {'id': 'fakeid', 'status': 'CREATING', 'networks': {}}
        net_name = self._manager.get_config_option("service_network_name")
        sg = type('FakeSG', (object, ), {'id': 'fakeid', 'name': 'fakename'})
        ip_address = 'fake_ip_address'
        service_image_id = 'fake_service_image_id'
        key_data = 'fake_key_name', 'fake_key_path'
        instance_name = 'fake_instance_name'
        network_info = {}
        # 'admin_port' deliberately carries no fixed_ips to trigger the error.
        network_data = {
            'nics': ['fake_nic1', 'fake_nic2'],
            'router_id': 'fake_router_id', 'subnet_id': 'fake_subnet_id',
            'public_port': {'id': 'fake_public_port',
                            'fixed_ips': [{'ip_address': ip_address}]},
            'service_port': {'id': 'fake_service_port',
                             'fixed_ips': [{'ip_address': ip_address}]},
            'admin_port': {'id': 'fake_admin_port',
                           'fixed_ips': []},
            'router': {'id': 'fake_router_id'}}
        server_get = {
            'id': 'fakeid', 'status': 'ACTIVE', 'networks':
            {net_name: [ip_address]}}
        self.mock_object(service_instance.time, 'time',
                         mock.Mock(return_value=5))
        self.mock_object(self._manager.network_helper, 'setup_network',
                         mock.Mock(return_value=network_data))
        self.mock_object(self._manager.network_helper, 'get_network_name',
                         mock.Mock(return_value=net_name))
        self.mock_object(self._manager, '_get_service_image',
                         mock.Mock(return_value=service_image_id))
        self.mock_object(self._manager, '_get_key',
                         mock.Mock(return_value=key_data))
        self.mock_object(self._manager, '_get_or_create_security_group',
                         mock.Mock(return_value=sg))
        self.mock_object(self._manager.compute_api, 'server_create',
                         mock.Mock(return_value=server_create))
        self.mock_object(self._manager.compute_api, 'server_get',
                         mock.Mock(return_value=server_get))
        self.mock_object(self._manager.compute_api,
                         'add_security_group_to_server')
        self.assertRaises(
            exception.AdminIPNotFound, self._manager._create_service_instance,
            self._manager.admin_context, instance_name, network_info)
        self.assertTrue(service_instance.time.time.called)
        self._manager.network_helper.setup_network.assert_called_once_with(
            network_info)
        self._manager._get_service_image.assert_called_once_with(
            self._manager.admin_context)
        self._manager._get_key.assert_called_once_with(
            self._manager.admin_context)
        self._manager._get_or_create_security_group.assert_called_once_with(
            self._manager.admin_context)
        self._manager.compute_api.server_create.assert_called_once_with(
            self._manager.admin_context, name=instance_name,
            image=service_image_id, flavor=100,
            key_name=key_data[0], nics=network_data['nics'],
            availability_zone=service_instance.CONF.storage_availability_zone)
        self._manager.compute_api.server_get.assert_called_once_with(
            self._manager.admin_context, server_create['id'])
        self._manager.compute_api.add_security_group_to_server.\
            assert_called_once_with(
                self._manager.admin_context, server_get['id'], sg.id)
        # NOTE(review): assert_has_calls([]) always passes; probably meant
        # assertFalse(get_network_name.called) — confirm before changing.
        self._manager.network_helper.get_network_name.assert_has_calls([])
    @ddt.data(
        dict(
            instance_id_included=False,
            mockobj=mock.Mock(side_effect=exception.ServiceInstanceException)),
        dict(
            instance_id_included=True,
            mockobj=mock.Mock(return_value=dict(id='fakeid', status='ERROR'))))
    @ddt.unpack
    def test___create_service_instance_failed_to_create(
            self, instance_id_included, mockobj):
        """Creation failure attaches network (and instance) ids to the error.

        Parameterized over server_create raising outright vs. returning an
        ERROR instance; in the latter case 'instance_id' must appear in the
        exception's detail_data.
        """
        service_image_id = 'fake_service_image_id'
        key_data = 'fake_key_name', 'fake_key_path'
        instance_name = 'fake_instance_name'
        network_info = dict()
        network_data = dict(
            nics=['fake_nic1', 'fake_nic2'],
            router_id='fake_router_id', subnet_id='fake_subnet_id')
        self.mock_object(self._manager.network_helper, 'setup_network',
                         mock.Mock(return_value=network_data))
        self.mock_object(self._manager, '_get_service_image',
                         mock.Mock(return_value=service_image_id))
        self.mock_object(self._manager, '_get_key',
                         mock.Mock(return_value=key_data))
        self.mock_object(
            self._manager.compute_api, 'server_create', mockobj)
        self.mock_object(
            self._manager, 'wait_for_instance_to_be_active',
            mock.Mock(side_effect=exception.ServiceInstanceException))
        try:
            self._manager._create_service_instance(
                self._manager.admin_context, instance_name, network_info)
        except exception.ServiceInstanceException as e:
            expected = dict(server_details=dict(
                subnet_id=network_data['subnet_id'],
                router_id=network_data['router_id']))
            if instance_id_included:
                expected['server_details']['instance_id'] = 'fakeid'
            self.assertEqual(expected, e.detail_data)
        else:
            raise exception.ManilaException('Expected error was not raised.')
        self._manager.network_helper.setup_network.assert_called_once_with(
            network_info)
        self._manager._get_service_image.assert_called_once_with(
            self._manager.admin_context)
        self._manager._get_key.assert_called_once_with(
            self._manager.admin_context)
        self._manager.compute_api.server_create.assert_called_once_with(
            self._manager.admin_context, name=instance_name,
            image=service_image_id, flavor=100,
            key_name=key_data[0], nics=network_data['nics'],
            availability_zone=service_instance.CONF.storage_availability_zone)
def test___create_service_instance_failed_to_build(self):
server_create = dict(id='fakeid', status='CREATING', networks=dict())
service_image_id = 'fake_service_image_id'
key_data = 'fake_key_name', 'fake_key_path'
instance_name = 'fake_instance_name'
network_info = dict()
network_data = dict(
nics=['fake_nic1', 'fake_nic2'],
router_id='fake_router_id', subnet_id='fake_subnet_id')
self.mock_object(self._manager.network_helper, 'setup_network',
mock.Mock(return_value=network_data))
self.mock_object(self._manager, '_get_service_image',
mock.Mock(return_value=service_image_id))
self.mock_object(self._manager, '_get_key',
mock.Mock(return_value=key_data))
self.mock_object(self._manager.compute_api, 'server_create',
mock.Mock(return_value=server_create))
self.mock_object(
self._manager, 'wait_for_instance_to_be_active',
mock.Mock(side_effect=exception.ServiceInstanceException))
try:
self._manager._create_service_instance(
self._manager.admin_context, instance_name, network_info)
except exception.ServiceInstanceException as e:
self.assertEqual(
dict(server_details=dict(subnet_id=network_data['subnet_id'],
router_id=network_data['router_id'],
instance_id=server_create['id'])),
e.detail_data)
else:
raise exception.ManilaException('Expected error was not raised.')
self._manager.network_helper.setup_network.assert_called_once_with(
network_info)
self._manager._get_service_image.assert_called_once_with(
self._manager.admin_context)
self._manager._get_key.assert_called_once_with(
self._manager.admin_context)
self._manager.compute_api.server_create.assert_called_once_with(
self._manager.admin_context, name=instance_name,
image=service_image_id, flavor=100,
key_name=key_data[0], nics=network_data['nics'],
availability_zone=service_instance.CONF.storage_availability_zone)
    @ddt.data(
        dict(name=None, path=None),
        dict(name=None, path='/tmp'))
    @ddt.unpack
    def test__create_service_instance_no_key_and_no_path(self, name, path):
        """Creation fails when no usable SSH key name is available."""
        key_data = name, path
        self.mock_object(self._manager, '_get_service_image')
        self.mock_object(self._manager, '_get_key',
                         mock.Mock(return_value=key_data))
        self.assertRaises(
            exception.ServiceInstanceException,
            self._manager._create_service_instance,
            self._manager.admin_context, 'fake_instance_name', dict())
        self._manager._get_service_image.assert_called_once_with(
            self._manager.admin_context)
        self._manager._get_key.assert_called_once_with(
            self._manager.admin_context)
    @mock.patch('time.sleep')
    @mock.patch('time.time')
    def _test_wait_for_instance(self, mock_time, mock_sleep,
                                server_get_side_eff=None,
                                expected_try_count=1,
                                expected_sleep_count=0,
                                expected_ret_val=None,
                                expected_exc=None):
        """Common driver for the wait_for_instance_to_be_active tests.

        time.time and time.sleep are replaced by a simulated clock so the
        polling loop's timeout logic runs instantly and deterministically.
        """
        mock_server_get = mock.Mock(side_effect=server_get_side_eff)
        self.mock_object(self._manager.compute_api, 'server_get',
                         mock_server_get)
        self.fake_time = 0
        def fake_time():
            # Report the current value of the simulated clock.
            return self.fake_time
        def fake_sleep(sleep_time):
            # "Sleeping" just advances the simulated clock.
            self.fake_time += sleep_time
        # Note(lpetrut): LOG methods can call time.time
        mock_time.side_effect = fake_time
        mock_sleep.side_effect = fake_sleep
        timeout = 3
        if expected_exc:
            self.assertRaises(
                expected_exc,
                self._manager.wait_for_instance_to_be_active,
                instance_id=mock.sentinel.instance_id,
                timeout=timeout)
        else:
            instance = self._manager.wait_for_instance_to_be_active(
                instance_id=mock.sentinel.instance_id,
                timeout=timeout)
            self.assertEqual(expected_ret_val, instance)
        mock_server_get.assert_has_calls(
            [mock.call(self._manager.admin_context,
                       mock.sentinel.instance_id)] * expected_try_count)
        mock_sleep.assert_has_calls([mock.call(1)] * expected_sleep_count)
    def test_wait_for_instance_timeout(self):
        """Raises when no usable instance appears before the timeout."""
        server_get_side_eff = [
            exception.InstanceNotFound(
                instance_id=mock.sentinel.instance_id),
            {'status': 'BUILDING'},
            {'status': 'ACTIVE'}]
        # Note that in this case, although the status is active, the
        # 'networks' field is missing.
        self._test_wait_for_instance(
            server_get_side_eff=server_get_side_eff,
            expected_exc=exception.ServiceInstanceException,
            expected_try_count=3,
            expected_sleep_count=3)
def test_wait_for_instance_error_state(self):
mock_instance = {'status': 'ERROR'}
self._test_wait_for_instance(
server_get_side_eff=[mock_instance],
expected_exc=exception.ServiceInstanceException,
expected_try_count=1)
def test_wait_for_instance_available(self):
mock_instance = {'status': 'ACTIVE',
'networks': mock.sentinel.networks}
self._test_wait_for_instance(
server_get_side_eff=[mock_instance],
expected_try_count=1,
expected_ret_val=mock_instance)
def test_reboot_server(self):
fake_server = {'instance_id': mock.sentinel.instance_id}
soft_reboot = True
mock_reboot = mock.Mock()
self.mock_object(self._manager.compute_api, 'server_reboot',
mock_reboot)
self._manager.reboot_server(fake_server, soft_reboot)
mock_reboot.assert_called_once_with(self._manager.admin_context,
fake_server['instance_id'],
soft_reboot)
class BaseNetworkHelperTestCase(test.TestCase):
    """Tests Base network helper for service instance."""
    def test_instantiate_valid(self):
        """A subclass implementing all abstract members is instantiable."""
        class FakeNetworkHelper(service_instance.BaseNetworkhelper):
            @property
            def NAME(self):
                return 'fake_NAME'
            def __init__(self, service_instance_manager):
                self.fake_init = 'fake_init_value'
            def get_network_name(self, network_info):
                return 'fake_network_name'
            def setup_connectivity_with_service_instances(self):
                return 'fake_setup_connectivity_with_service_instances'
            def setup_network(self, network_info):
                return 'fake_setup_network'
            def teardown_network(self, server_details):
                return 'fake_teardown_network'
        instance = FakeNetworkHelper('fake')
        attrs = [
            'fake_init', 'NAME', 'get_network_name', 'teardown_network',
            'setup_connectivity_with_service_instances', 'setup_network',
        ]
        for attr in attrs:
            self.assertTrue(hasattr(instance, attr))
        self.assertEqual('fake_init_value', instance.fake_init)
        self.assertEqual('fake_NAME', instance.NAME)
        self.assertEqual(
            'fake_network_name', instance.get_network_name('fake'))
        self.assertEqual(
            'fake_setup_connectivity_with_service_instances',
            instance.setup_connectivity_with_service_instances())
        self.assertEqual('fake_setup_network', instance.setup_network('fake'))
        self.assertEqual(
            'fake_teardown_network', instance.teardown_network('fake'))
    def test_instantiate_invalid(self):
        """Instantiating the abstract base class directly must fail."""
        self.assertRaises(
            TypeError, service_instance.BaseNetworkhelper, 'fake')
@ddt.ddt
class NeutronNetworkHelperTestCase(test.TestCase):
"""Tests Neutron network helper for service instance."""
    def setUp(self):
        """Stub class importing and prepare a fake driver manager."""
        super(NeutronNetworkHelperTestCase, self).setUp()
        self.mock_object(importutils, 'import_class')
        self.fake_manager = FakeServiceInstance()
    def _init_neutron_network_plugin(self):
        """Build a NeutronNetworkHelper with its service network id stubbed."""
        self.mock_object(
            service_instance.NeutronNetworkHelper, '_get_service_network_id',
            mock.Mock(return_value='fake_service_network_id'))
        return service_instance.NeutronNetworkHelper(self.fake_manager)
    def test_init_neutron_network_plugin(self):
        """Helper exposes expected attributes and the DEFAULT config group."""
        instance = self._init_neutron_network_plugin()
        self.assertEqual(service_instance.NEUTRON_NAME, instance.NAME)
        attrs = [
            'neutron_api', 'vif_driver', 'service_network_id',
            'connect_share_server_to_tenant_network', 'get_config_option']
        for attr in attrs:
            self.assertTrue(hasattr(instance, attr), "No attr '%s'" % attr)
        service_instance.NeutronNetworkHelper._get_service_network_id.\
            assert_called_once_with()
        self.assertEqual('DEFAULT', instance.neutron_api.config_group_name)
def test_init_neutron_network_plugin_with_driver_config_group(self):
self.fake_manager.driver_config = mock.Mock()
self.fake_manager.driver_config.config_group =\
'fake_config_group'
self.fake_manager.driver_config.network_config_group = None
instance = self._init_neutron_network_plugin()
self.assertEqual('fake_config_group',
instance.neutron_api.config_group_name)
def test_init_neutron_network_plugin_with_network_config_group(self):
self.fake_manager.driver_config = mock.Mock()
self.fake_manager.driver_config.config_group =\
"fake_config_group"
self.fake_manager.driver_config.network_config_group =\
"fake_network_config_group"
instance = self._init_neutron_network_plugin()
self.assertEqual('fake_network_config_group',
instance.neutron_api.config_group_name)
def test_admin_project_id(self):
instance = self._init_neutron_network_plugin()
admin_project_id = 'fake_admin_project_id'
self.mock_class('manila.network.neutron.api.API', mock.Mock())
instance.neutron_api.admin_project_id = admin_project_id
self.assertEqual(admin_project_id, instance.admin_project_id)
def test_get_network_name(self):
network_info = dict(neutron_net_id='fake_neutron_net_id')
network = dict(name='fake_network_name')
instance = self._init_neutron_network_plugin()
self.mock_object(
instance.neutron_api, 'get_network',
mock.Mock(return_value=network))
result = instance.get_network_name(network_info)
self.assertEqual(network['name'], result)
instance.neutron_api.get_network.assert_called_once_with(
network_info['neutron_net_id'])
    def test_get_service_network_id_none_exist(self):
        """A new service network is created when none exists yet."""
        service_network_name = fake_get_config_option('service_network_name')
        network = dict(id='fake_network_id')
        admin_project_id = 'fake_admin_project_id'
        self.mock_object(
            service_instance.neutron.API, 'get_all_admin_project_networks',
            mock.Mock(return_value=[]))
        self.mock_object(
            service_instance.neutron.API, 'admin_project_id',
            mock.Mock(return_value=admin_project_id))
        self.mock_object(
            service_instance.neutron.API, 'network_create',
            mock.Mock(return_value=network))
        instance = service_instance.NeutronNetworkHelper(self.fake_manager)
        result = instance._get_service_network_id()
        self.assertEqual(network['id'], result)
        self.assertTrue(service_instance.neutron.API.
                        get_all_admin_project_networks.called)
        service_instance.neutron.API.network_create.assert_has_calls([
            mock.call(instance.admin_project_id, service_network_name)])
    def test_get_service_network_id_one_exist(self):
        """An existing service network is reused instead of recreated."""
        service_network_name = fake_get_config_option('service_network_name')
        network = dict(id='fake_network_id', name=service_network_name)
        admin_project_id = 'fake_admin_project_id'
        self.mock_object(
            service_instance.neutron.API, 'get_all_admin_project_networks',
            mock.Mock(return_value=[network]))
        self.mock_object(
            service_instance.neutron.API, 'admin_project_id',
            mock.Mock(return_value=admin_project_id))
        instance = service_instance.NeutronNetworkHelper(self.fake_manager)
        result = instance._get_service_network_id()
        self.assertEqual(network['id'], result)
        self.assertTrue(service_instance.neutron.API.
                        get_all_admin_project_networks.called)
    def test_get_service_network_id_two_exist(self):
        """Duplicate service networks are ambiguous and raise an error."""
        service_network_name = fake_get_config_option('service_network_name')
        network = dict(id='fake_network_id', name=service_network_name)
        self.mock_object(
            service_instance.neutron.API, 'get_all_admin_project_networks',
            mock.Mock(return_value=[network, network]))
        helper = service_instance.NeutronNetworkHelper(self.fake_manager)
        self.assertRaises(exception.ManilaException,
                          lambda: helper.service_network_id)
        service_instance.neutron.API.get_all_admin_project_networks.\
            assert_has_calls([mock.call()])
    @ddt.data(dict(), dict(subnet_id='foo'), dict(router_id='bar'))
    def test_teardown_network_no_service_data(self, server_details):
        """No router interface is removed when service data is incomplete."""
        instance = self._init_neutron_network_plugin()
        self.mock_object(
            service_instance.neutron.API, 'router_remove_interface')
        instance.teardown_network(server_details)
        self.assertFalse(
            service_instance.neutron.API.router_remove_interface.called)
    @ddt.data(
        *[dict(server_details=sd, fail=f) for f in (True, False)
          for sd in (dict(service_port_id='fake_service_port_id'),
                     dict(public_port_id='fake_public_port_id'),
                     dict(service_port_id='fake_service_port_id',
                          public_port_id='fake_public_port_id'))]
    )
    @ddt.unpack
    def test_teardown_network_with_ports(self, server_details, fail):
        """Every port id in server details is deleted; 404s are only logged."""
        instance = self._init_neutron_network_plugin()
        self.mock_object(
            service_instance.neutron.API, 'router_remove_interface')
        if fail:
            # Simulate the port being already gone on the neutron side.
            delete_port_mock = mock.Mock(
                side_effect=exception.NetworkException(code=404))
        else:
            delete_port_mock = mock.Mock()
        self.mock_object(instance.neutron_api, 'delete_port', delete_port_mock)
        self.mock_object(service_instance.LOG, 'debug')
        instance.teardown_network(server_details)
        self.assertFalse(instance.neutron_api.router_remove_interface.called)
        self.assertEqual(
            len(server_details),
            len(instance.neutron_api.delete_port.mock_calls))
        for k, v in server_details.items():
            self.assertIn(
                mock.call(v), instance.neutron_api.delete_port.mock_calls)
        if fail:
            service_instance.LOG.debug.assert_has_calls([
                mock.call(mock.ANY, mock.ANY) for sd in server_details
            ])
        else:
            service_instance.LOG.debug.assert_has_calls([])
    @ddt.data(
        dict(service_port_id='fake_service_port_id'),
        dict(public_port_id='fake_public_port_id'),
        dict(service_port_id='fake_service_port_id',
             public_port_id='fake_public_port_id'),
    )
    def test_teardown_network_with_ports_unhandled_exception(self,
                                                            server_details):
        """Non-404 errors from port deletion propagate to the caller."""
        instance = self._init_neutron_network_plugin()
        self.mock_object(
            service_instance.neutron.API, 'router_remove_interface')
        delete_port_mock = mock.Mock(
            side_effect=exception.NetworkException(code=500))
        self.mock_object(
            service_instance.neutron.API, 'delete_port', delete_port_mock)
        self.mock_object(service_instance.LOG, 'debug')
        self.assertRaises(
            exception.NetworkException,
            instance.teardown_network,
            server_details,
        )
        self.assertFalse(
            service_instance.neutron.API.router_remove_interface.called)
        service_instance.neutron.API.delete_port.assert_called_once_with(
            mock.ANY)
        service_instance.LOG.debug.assert_has_calls([])
    def test_teardown_network_with_wrong_ports(self):
        """Unrecognized keys in server details trigger no neutron calls."""
        instance = self._init_neutron_network_plugin()
        self.mock_object(
            service_instance.neutron.API, 'router_remove_interface')
        self.mock_object(
            service_instance.neutron.API, 'delete_port')
        self.mock_object(service_instance.LOG, 'debug')
        instance.teardown_network(dict(foo_id='fake_service_port_id'))
        service_instance.neutron.API.router_remove_interface.assert_has_calls(
            [])
        service_instance.neutron.API.delete_port.assert_has_calls([])
        service_instance.LOG.debug.assert_has_calls([])
    def test_teardown_network_subnet_is_used(self):
        """A subnet still used by a compute port is left untouched."""
        server_details = dict(subnet_id='foo', router_id='bar')
        # One port on the subnet owned by a compute instance -> in use.
        fake_ports = [
            {'fixed_ips': [{'subnet_id': server_details['subnet_id']}],
             'device_id': 'fake_device_id',
             'device_owner': 'compute:foo'},
        ]
        instance = self._init_neutron_network_plugin()
        self.mock_object(
            service_instance.neutron.API, 'router_remove_interface')
        self.mock_object(
            service_instance.neutron.API, 'update_subnet')
        self.mock_object(
            service_instance.neutron.API, 'list_ports',
            mock.Mock(return_value=fake_ports))
        instance.teardown_network(server_details)
        self.assertFalse(
            service_instance.neutron.API.router_remove_interface.called)
        self.assertFalse(service_instance.neutron.API.update_subnet.called)
        service_instance.neutron.API.list_ports.assert_called_once_with(
            fields=['fixed_ips', 'device_id', 'device_owner'])
    def test_teardown_network_subnet_not_used(self):
        """An unused subnet is detached from the router and blanked out."""
        server_details = dict(subnet_id='foo', router_id='bar')
        # None of these ports counts as "using" the subnet: router
        # interface owner, different subnet, and empty device id.
        fake_ports = [
            {'fixed_ips': [{'subnet_id': server_details['subnet_id']}],
             'device_id': 'fake_device_id',
             'device_owner': 'network:router_interface'},
            {'fixed_ips': [{'subnet_id': 'bar' + server_details['subnet_id']}],
             'device_id': 'fake_device_id',
             'device_owner': 'compute'},
            {'fixed_ips': [{'subnet_id': server_details['subnet_id']}],
             'device_id': '',
             'device_owner': 'compute'},
        ]
        instance = self._init_neutron_network_plugin()
        self.mock_object(
            service_instance.neutron.API, 'router_remove_interface')
        self.mock_object(
            service_instance.neutron.API, 'update_subnet')
        self.mock_object(
            service_instance.neutron.API, 'list_ports',
            mock.Mock(return_value=fake_ports))
        instance.teardown_network(server_details)
        service_instance.neutron.API.router_remove_interface.\
            assert_called_once_with('bar', 'foo')
        service_instance.neutron.API.update_subnet.\
            assert_called_once_with('foo', '')
        service_instance.neutron.API.list_ports.assert_called_once_with(
            fields=['fixed_ips', 'device_id', 'device_owner'])
    def test_teardown_network_subnet_not_used_and_get_error_404(self):
        """A 404 on interface removal is tolerated; cleanup continues."""
        server_details = dict(subnet_id='foo', router_id='bar')
        fake_ports = [
            {'fixed_ips': [{'subnet_id': server_details['subnet_id']}],
             'device_id': 'fake_device_id',
             'device_owner': 'fake'},
        ]
        instance = self._init_neutron_network_plugin()
        self.mock_object(
            service_instance.neutron.API, 'router_remove_interface',
            mock.Mock(side_effect=exception.NetworkException(code=404)))
        self.mock_object(
            service_instance.neutron.API, 'update_subnet')
        self.mock_object(
            service_instance.neutron.API, 'list_ports',
            mock.Mock(return_value=fake_ports))
        instance.teardown_network(server_details)
        service_instance.neutron.API.router_remove_interface.\
            assert_called_once_with('bar', 'foo')
        service_instance.neutron.API.update_subnet.\
            assert_called_once_with('foo', '')
        service_instance.neutron.API.list_ports.assert_called_once_with(
            fields=['fixed_ips', 'device_id', 'device_owner'])
    def test_teardown_network_subnet_not_used_get_unhandled_error(self):
        """Non-404 errors during interface removal abort the teardown."""
        server_details = dict(subnet_id='foo', router_id='bar')
        fake_ports = [
            {'fixed_ips': [{'subnet_id': server_details['subnet_id']}],
             'device_id': 'fake_device_id',
             'device_owner': 'fake'},
        ]
        instance = self._init_neutron_network_plugin()
        self.mock_object(
            service_instance.neutron.API, 'router_remove_interface',
            mock.Mock(side_effect=exception.NetworkException(code=500)))
        self.mock_object(
            service_instance.neutron.API, 'update_subnet')
        self.mock_object(
            service_instance.neutron.API, 'list_ports',
            mock.Mock(return_value=fake_ports))
        self.assertRaises(
            exception.NetworkException,
            instance.teardown_network, server_details)
        service_instance.neutron.API.router_remove_interface.\
            assert_called_once_with('bar', 'foo')
        self.assertFalse(service_instance.neutron.API.update_subnet.called)
        service_instance.neutron.API.list_ports.assert_called_once_with(
            fields=['fixed_ips', 'device_id', 'device_owner'])
    def test_setup_network_and_connect_share_server_to_tenant_net(self):
        """Service + tenant ports and a new service subnet are set up."""
        def fake_create_port(*aargs, **kwargs):
            # Return the right fake port depending on the target network.
            if aargs[1] == 'fake_service_network_id':
                return self.service_port
            elif aargs[1] == 'fake_tenant_network_id':
                return self.public_port
            else:
                raise exception.ManilaException('Got unexpected data')
        admin_project_id = 'fake_admin_project_id'
        network_info = dict(
            neutron_net_id='fake_tenant_network_id',
            neutron_subnet_id='fake_tenant_subnet_id')
        cidr = '13.0.0.0/24'
        self.service_port = dict(
            id='fake_service_port_id',
            fixed_ips=[dict(ip_address='fake_service_port_ip_address')])
        self.public_port = dict(
            id='fake_tenant_port_id',
            fixed_ips=[dict(ip_address='fake_public_port_ip_address')])
        service_subnet = dict(id='fake_service_subnet')
        instance = self._init_neutron_network_plugin()
        instance.connect_share_server_to_tenant_network = True
        self.mock_object(instance, '_get_service_network_id',
                         mock.Mock(return_value='fake_service_network_id'))
        self.mock_object(
            service_instance.neutron.API, 'admin_project_id',
            mock.Mock(return_value=admin_project_id))
        self.mock_object(
            service_instance.neutron.API, 'create_port',
            mock.Mock(side_effect=fake_create_port))
        self.mock_object(
            service_instance.neutron.API, 'subnet_create',
            mock.Mock(return_value=service_subnet))
        self.mock_object(
            instance, 'setup_connectivity_with_service_instances',
            mock.Mock(return_value=service_subnet))
        self.mock_object(
            instance, '_get_cidr_for_subnet', mock.Mock(return_value=cidr))
        self.mock_object(
            instance, '_get_service_subnet', mock.Mock(return_value=None))
        expected = {
            'ip_address': self.public_port['fixed_ips'][0]['ip_address'],
            'public_port': self.public_port,
            'service_port': self.service_port,
            'service_subnet': service_subnet,
            'ports': [self.public_port, self.service_port],
            'nics': [{'port-id': self.public_port['id']},
                     {'port-id': self.service_port['id']}]}
        result = instance.setup_network(network_info)
        self.assertEqual(expected, result)
        instance.setup_connectivity_with_service_instances.\
            assert_called_once_with()
        instance._get_service_subnet.assert_called_once_with(mock.ANY)
        instance._get_cidr_for_subnet.assert_called_once_with()
        self.assertTrue(service_instance.neutron.API.subnet_create.called)
        self.assertTrue(service_instance.neutron.API.create_port.called)
    def test_setup_network_and_connect_share_server_to_tenant_net_admin(self):
        """Admin and tenant ports are created when the admin net is used."""
        def fake_create_port(*aargs, **kwargs):
            # Return the right fake port depending on the target network.
            if aargs[1] == 'fake_admin_network_id':
                return self.admin_port
            elif aargs[1] == 'fake_tenant_network_id':
                return self.public_port
            else:
                raise exception.ManilaException('Got unexpected data')
        admin_project_id = 'fake_admin_project_id'
        network_info = {
            'neutron_net_id': 'fake_tenant_network_id',
            'neutron_subnet_id': 'fake_tenant_subnet_id'}
        self.admin_port = {
            'id': 'fake_admin_port_id',
            'fixed_ips': [{'ip_address': 'fake_admin_port_ip_address'}]}
        self.public_port = {
            'id': 'fake_tenant_port_id',
            'fixed_ips': [{'ip_address': 'fake_public_port_ip_address'}]}
        instance = self._init_neutron_network_plugin()
        instance.use_admin_port = True
        instance.use_service_network = False
        instance.admin_network_id = 'fake_admin_network_id'
        instance.admin_subnet_id = 'fake_admin_subnet_id'
        instance.connect_share_server_to_tenant_network = True
        self.mock_object(
            service_instance.neutron.API, 'admin_project_id',
            mock.Mock(return_value=admin_project_id))
        self.mock_object(
            service_instance.neutron.API, 'create_port',
            mock.Mock(side_effect=fake_create_port))
        self.mock_object(
            instance, 'setup_connectivity_with_service_instances')
        expected = {
            'ip_address': self.public_port['fixed_ips'][0]['ip_address'],
            'public_port': self.public_port,
            'admin_port': self.admin_port,
            'ports': [self.public_port, self.admin_port],
            'nics': [{'port-id': self.public_port['id']},
                     {'port-id': self.admin_port['id']}]}
        result = instance.setup_network(network_info)
        self.assertEqual(expected, result)
        instance.setup_connectivity_with_service_instances.\
            assert_called_once_with()
        self.assertTrue(service_instance.neutron.API.create_port.called)
    @ddt.data(None, exception.NetworkException(code=400))
    def test_setup_network_using_router_success(self, return_obj):
        """Router-based setup succeeds; a 400 on interface add is tolerated."""
        admin_project_id = 'fake_admin_project_id'
        network_info = dict(
            neutron_net_id='fake_tenant_network_id',
            neutron_subnet_id='fake_tenant_subnet_id')
        cidr = '13.0.0.0/24'
        self.admin_port = {
            'id': 'fake_admin_port_id',
            'fixed_ips': [{'ip_address': 'fake_admin_port_ip_address'}]}
        self.service_port = dict(
            id='fake_service_port_id',
            fixed_ips=[dict(ip_address='fake_service_port_ip_address')])
        service_subnet = dict(id='fake_service_subnet')
        instance = self._init_neutron_network_plugin()
        instance.use_admin_port = True
        instance.admin_network_id = 'fake_admin_network_id'
        instance.admin_subnet_id = 'fake_admin_subnet_id'
        instance.connect_share_server_to_tenant_network = False
        self.mock_object(instance, '_get_service_network_id',
                         mock.Mock(return_value='fake_service_network_id'))
        router = dict(id='fake_router_id')
        self.mock_object(
            service_instance.neutron.API, 'admin_project_id',
            mock.Mock(return_value=admin_project_id))
        self.mock_object(
            service_instance.neutron.API, 'create_port',
            mock.Mock(side_effect=[self.service_port, self.admin_port]))
        self.mock_object(
            service_instance.neutron.API, 'subnet_create',
            mock.Mock(return_value=service_subnet))
        self.mock_object(
            instance, '_get_private_router', mock.Mock(return_value=router))
        self.mock_object(
            service_instance.neutron.API, 'router_add_interface',
            mock.Mock(side_effect=return_obj))
        self.mock_object(instance, 'setup_connectivity_with_service_instances')
        self.mock_object(
            instance, '_get_cidr_for_subnet', mock.Mock(return_value=cidr))
        self.mock_object(
            instance, '_get_service_subnet', mock.Mock(return_value=None))
        expected = {
            'ip_address': self.service_port['fixed_ips'][0]['ip_address'],
            'service_port': self.service_port,
            'service_subnet': service_subnet,
            'admin_port': self.admin_port, 'router': router,
            'ports': [self.service_port, self.admin_port],
            'nics': [{'port-id': self.service_port['id']},
                     {'port-id': self.admin_port['id']}]}
        result = instance.setup_network(network_info)
        self.assertEqual(expected, result)
        instance.setup_connectivity_with_service_instances.\
            assert_called_once_with()
        instance._get_service_subnet.assert_called_once_with(mock.ANY)
        instance._get_cidr_for_subnet.assert_called_once_with()
        self.assertTrue(service_instance.neutron.API.subnet_create.called)
        self.assertTrue(service_instance.neutron.API.create_port.called)
        instance._get_private_router.assert_called_once_with(
            network_info['neutron_net_id'], network_info['neutron_subnet_id'])
        service_instance.neutron.API.router_add_interface.\
            assert_called_once_with(router['id'], service_subnet['id'])
    def test_setup_network_using_router_addon_of_interface_failed(self):
        """Unrecoverable router_add_interface errors are propagated."""
        network_info = dict(
            neutron_net_id='fake_tenant_network_id',
            neutron_subnet_id='fake_tenant_subnet_id')
        service_subnet = dict(id='fake_service_subnet')
        instance = self._init_neutron_network_plugin()
        instance.connect_share_server_to_tenant_network = False
        self.mock_object(instance, '_get_service_network_id',
                         mock.Mock(return_value='fake_service_network_id'))
        router = dict(id='fake_router_id')
        self.mock_object(
            instance, '_get_private_router', mock.Mock(return_value=router))
        self.mock_object(
            service_instance.neutron.API, 'router_add_interface',
            mock.Mock(side_effect=exception.NetworkException(code=500)))
        self.mock_object(
            instance, '_get_service_subnet',
            mock.Mock(return_value=service_subnet))
        self.assertRaises(
            exception.NetworkException,
            instance.setup_network, network_info)
        instance._get_service_subnet.assert_called_once_with(mock.ANY)
        instance._get_private_router.assert_called_once_with(
            network_info['neutron_net_id'], network_info['neutron_subnet_id'])
        service_instance.neutron.API.router_add_interface.\
            assert_called_once_with(router['id'], service_subnet['id'])
    def test_setup_network_using_router_connectivity_verification_fail(self):
        """Failed connectivity setup deletes the created service port."""
        admin_project_id = 'fake_admin_project_id'
        network_info = dict(
            neutron_net_id='fake_tenant_network_id',
            neutron_subnet_id='fake_tenant_subnet_id')
        cidr = '13.0.0.0/24'
        self.service_port = dict(
            id='fake_service_port_id',
            fixed_ips=[dict(ip_address='fake_service_port_ip_address')])
        service_subnet = dict(id='fake_service_subnet')
        instance = self._init_neutron_network_plugin()
        instance.connect_share_server_to_tenant_network = False
        self.mock_object(instance, '_get_service_network_id',
                         mock.Mock(return_value='fake_service_network_id'))
        router = dict(id='fake_router_id')
        self.mock_object(
            service_instance.neutron.API, 'admin_project_id',
            mock.Mock(return_value=admin_project_id))
        self.mock_object(
            service_instance.neutron.API, 'create_port',
            mock.Mock(return_value=self.service_port))
        self.mock_object(
            service_instance.neutron.API, 'subnet_create',
            mock.Mock(return_value=service_subnet))
        self.mock_object(service_instance.neutron.API, 'delete_port')
        self.mock_object(
            instance, '_get_private_router', mock.Mock(return_value=router))
        self.mock_object(
            service_instance.neutron.API, 'router_add_interface')
        self.mock_object(
            instance, 'setup_connectivity_with_service_instances',
            mock.Mock(side_effect=exception.ManilaException('Fake')))
        self.mock_object(
            instance, '_get_cidr_for_subnet', mock.Mock(return_value=cidr))
        self.mock_object(
            instance, '_get_service_subnet', mock.Mock(return_value=None))
        self.assertRaises(
            exception.ManilaException, instance.setup_network, network_info)
        instance.setup_connectivity_with_service_instances.\
            assert_called_once_with()
        instance._get_service_subnet.assert_called_once_with(mock.ANY)
        instance._get_cidr_for_subnet.assert_called_once_with()
        self.assertTrue(service_instance.neutron.API.subnet_create.called)
        self.assertTrue(service_instance.neutron.API.create_port.called)
        instance._get_private_router.assert_called_once_with(
            network_info['neutron_net_id'], network_info['neutron_subnet_id'])
        service_instance.neutron.API.router_add_interface.\
            assert_called_once_with(router['id'], service_subnet['id'])
        service_instance.neutron.API.delete_port.assert_has_calls([
            mock.call(self.service_port['id'])])
    def test__get_cidr_for_subnet_success(self):
        """The first division of the service CIDR is used when all are free."""
        expected = (
            fake_get_config_option('service_network_cidr').split('/')[0] +
            '/' + six.text_type(
                fake_get_config_option('service_network_division_mask')))
        instance = self._init_neutron_network_plugin()
        self.mock_object(
            instance, '_get_all_service_subnets', mock.Mock(return_value=[]))
        result = instance._get_cidr_for_subnet()
        self.assertEqual(expected, result)
        instance._get_all_service_subnets.assert_called_once_with()
    def test__get_cidr_for_subnet_failure(self):
        """Exhausted service CIDR space raises ServiceInstanceException."""
        subnets = []
        serv_cidr = netaddr.IPNetwork(
            fake_get_config_option('service_network_cidr'))
        division_mask = fake_get_config_option('service_network_division_mask')
        # Occupy every possible division of the service network CIDR.
        for subnet in serv_cidr.subnet(division_mask):
            subnets.append(dict(cidr=six.text_type(subnet.cidr)))
        instance = self._init_neutron_network_plugin()
        self.mock_object(
            instance, '_get_all_service_subnets',
            mock.Mock(return_value=subnets))
        self.assertRaises(
            exception.ServiceInstanceException,
            instance._get_cidr_for_subnet)
        instance._get_all_service_subnets.assert_called_once_with()
    def test_setup_connectivity_with_service_instances(self):
        """Service and admin interfaces are plugged and configured on host."""
        instance = self._init_neutron_network_plugin()
        instance.use_admin_port = True
        instance.admin_network_id = 'fake_admin_network_id'
        instance.admin_subnet_id = 'fake_admin_subnet_id'
        interface_name_service = 'fake_interface_name_service'
        interface_name_admin = 'fake_interface_name_admin'
        fake_division_mask = fake_get_config_option(
            'service_network_division_mask')
        fake_subnet_service = fake_network.FakeSubnet(
            cidr='10.254.0.0/%s' % fake_division_mask)
        fake_subnet_admin = fake_network.FakeSubnet(id='fake_admin_subnet_id',
                                                    cidr='10.0.0.0/24')
        fake_service_port = fake_network.FakePort(fixed_ips=[
            {'subnet_id': fake_subnet_service['id'],
             'ip_address': '10.254.0.2'}], mac_address='fake_mac_address')
        fake_admin_port = fake_network.FakePort(fixed_ips=[
            {'subnet_id': fake_subnet_admin['id'], 'ip_address': '10.0.0.4'}],
            mac_address='fake_mac_address')
        self.mock_object(instance, '_get_service_port',
                         mock.Mock(side_effect=[fake_service_port,
                                                fake_admin_port]))
        self.mock_object(instance, '_add_fixed_ips_to_service_port',
                         mock.Mock(return_value=fake_service_port))
        self.mock_object(instance.vif_driver, 'get_device_name',
                         mock.Mock(side_effect=[interface_name_service,
                                                interface_name_admin]))
        self.mock_object(instance.neutron_api, 'get_subnet',
                         mock.Mock(side_effect=[fake_subnet_service,
                                                fake_subnet_admin,
                                                fake_subnet_admin]))
        self.mock_object(instance, '_remove_outdated_interfaces')
        self.mock_object(instance.vif_driver, 'plug')
        device_mock = mock.Mock()
        self.mock_object(service_instance.ip_lib, 'IPDevice',
                         mock.Mock(return_value=device_mock))
        instance.setup_connectivity_with_service_instances()
        instance._get_service_port.assert_has_calls([
            mock.call(instance.service_network_id, None, 'manila-share'),
            mock.call('fake_admin_network_id', 'fake_admin_subnet_id',
                      'manila-admin-share')])
        instance.vif_driver.get_device_name.assert_has_calls([
            mock.call(fake_service_port), mock.call(fake_admin_port)])
        instance.vif_driver.plug.assert_has_calls([
            mock.call(interface_name_service, fake_service_port['id'],
                      fake_service_port['mac_address']),
            mock.call(interface_name_admin, fake_admin_port['id'],
                      fake_admin_port['mac_address'])])
        instance.neutron_api.get_subnet.assert_has_calls([
            mock.call(fake_subnet_service['id']),
            mock.call(fake_subnet_admin['id']),
            mock.call(fake_subnet_admin['id'])])
        instance.vif_driver.init_l3.assert_has_calls([
            mock.call(interface_name_service,
                      ['10.254.0.2/%s' % fake_division_mask]),
            mock.call(interface_name_admin, ['10.0.0.4/24'])])
        service_instance.ip_lib.IPDevice.assert_has_calls([
            mock.call(interface_name_service),
            mock.call(interface_name_admin)])
        device_mock.route.pullup_route.assert_has_calls([
            mock.call(interface_name_service),
            mock.call(interface_name_admin)])
        instance._remove_outdated_interfaces.assert_called_with(device_mock)
def test__get_set_of_device_cidrs(self):
device = fake_network.FakeDevice('foo')
expected = set(('1.0.0.0/27', '2.0.0.0/27'))
instance = self._init_neutron_network_plugin()
result = instance._get_set_of_device_cidrs(device)
self.assertEqual(expected, result)
def test__get_set_of_device_cidrs_exception(self):
device = fake_network.FakeDevice('foo')
self.mock_object(device.addr, 'list', mock.Mock(
side_effect=Exception('foo does not exist')))
instance = self._init_neutron_network_plugin()
result = instance._get_set_of_device_cidrs(device)
self.assertEqual(set(), result)
    def test__remove_outdated_interfaces(self):
        """A device reported by IPWrapper.get_devices gets unplugged."""
        device = fake_network.FakeDevice(
            'foobarquuz', [dict(ip_version=4, cidr='1.0.0.0/27')])
        # IPWrapper is mocked to report one other device on the host.
        devices = [fake_network.FakeDevice('foobar')]
        instance = self._init_neutron_network_plugin()
        self.mock_object(instance.vif_driver, 'unplug')
        self.mock_object(
            service_instance.ip_lib.IPWrapper, 'get_devices',
            mock.Mock(return_value=devices))
        instance._remove_outdated_interfaces(device)
        # The reported device is considered outdated and must be unplugged.
        instance.vif_driver.unplug.assert_called_once_with('foobar')
    def test__get_service_port_none_exist(self):
        """When no matching port exists, a new service port is created."""
        instance = self._init_neutron_network_plugin()
        admin_project_id = 'fake_admin_project_id'
        fake_port_values = {'device_id': 'manila-share',
                            'binding:host_id': 'fake_host'}
        self.mock_object(
            service_instance.neutron.API, 'admin_project_id',
            mock.Mock(return_value=admin_project_id))
        fake_service_port = fake_network.FakePort(device_id='manila-share')
        # list_ports returns nothing, so the helper must create a port.
        self.mock_object(instance.neutron_api, 'list_ports',
                         mock.Mock(return_value=[]))
        self.mock_object(service_instance.socket, 'gethostname',
                         mock.Mock(return_value='fake_host'))
        self.mock_object(instance.neutron_api, 'create_port',
                         mock.Mock(return_value=fake_service_port))
        self.mock_object(instance.neutron_api, 'update_port_fixed_ips',
                         mock.Mock(return_value=fake_service_port))
        result = instance._get_service_port(instance.service_network_id,
                                            None, 'manila-share')
        instance.neutron_api.list_ports.assert_called_once_with(
            **fake_port_values)
        instance.neutron_api.create_port.assert_called_once_with(
            instance.admin_project_id, instance.service_network_id,
            device_id='manila-share', device_owner='manila:share',
            host_id='fake_host', subnet_id=None)
        service_instance.socket.gethostname.assert_called_once_with()
        # No fixed-IP update should happen for a freshly created port.
        self.assertFalse(instance.neutron_api.update_port_fixed_ips.called)
        self.assertEqual(fake_service_port, result)
    def test__get_service_port_one_exist_on_same_host(self):
        """An existing port bound to this host is reused, not recreated."""
        instance = self._init_neutron_network_plugin()
        fake_port_values = {'device_id': 'manila-share',
                            'binding:host_id': 'fake_host'}
        fake_service_port = fake_network.FakePort(**fake_port_values)
        self.mock_object(service_instance.socket, 'gethostname',
                         mock.Mock(return_value='fake_host'))
        # One port already exists for this device on this host.
        self.mock_object(instance.neutron_api, 'list_ports',
                         mock.Mock(return_value=[fake_service_port]))
        self.mock_object(instance.neutron_api, 'create_port',
                         mock.Mock(return_value=fake_service_port))
        self.mock_object(instance.neutron_api, 'update_port_fixed_ips',
                         mock.Mock(return_value=fake_service_port))
        result = instance._get_service_port(instance.service_network_id,
                                            None, 'manila-share')
        instance.neutron_api.list_ports.assert_called_once_with(
            **fake_port_values)
        # Reuse path: neither creation nor fixed-IP updates are expected.
        self.assertFalse(instance.neutron_api.create_port.called)
        self.assertFalse(instance.neutron_api.update_port_fixed_ips.called)
        self.assertEqual(fake_service_port, result)
    def test__get_service_port_one_exist_on_different_host(self):
        """A port on another host is not reused; a local one is created.

        NOTE(review): list_ports is mocked to return an empty list here,
        which makes this scenario behave identically to the "none exist"
        case; returning the foreign-host port would better match the
        test name -- confirm intent against the plugin implementation.
        """
        instance = self._init_neutron_network_plugin()
        admin_project_id = 'fake_admin_project_id'
        fake_port = {'device_id': 'manila-share',
                     'binding:host_id': 'fake_host'}
        self.mock_object(
            service_instance.neutron.API, 'admin_project_id',
            mock.Mock(return_value=admin_project_id))
        fake_service_port = fake_network.FakePort(**fake_port)
        self.mock_object(instance.neutron_api, 'list_ports',
                         mock.Mock(return_value=[]))
        self.mock_object(service_instance.socket, 'gethostname',
                         mock.Mock(return_value='fake_host'))
        self.mock_object(instance.neutron_api, 'create_port',
                         mock.Mock(return_value=fake_service_port))
        self.mock_object(instance.neutron_api, 'update_port_fixed_ips',
                         mock.Mock(return_value=fake_service_port))
        result = instance._get_service_port(instance.service_network_id,
                                            None, 'manila-share')
        instance.neutron_api.list_ports.assert_called_once_with(
            **fake_port)
        instance.neutron_api.create_port.assert_called_once_with(
            instance.admin_project_id, instance.service_network_id,
            device_id='manila-share', device_owner='manila:share',
            host_id='fake_host', subnet_id=None)
        service_instance.socket.gethostname.assert_called_once_with()
        self.assertFalse(instance.neutron_api.update_port_fixed_ips.called)
        self.assertEqual(fake_service_port, result)
    def test__get_service_port_two_exist_on_same_host(self):
        """Two matching service ports are ambiguous and must raise."""
        instance = self._init_neutron_network_plugin()
        fake_service_port = fake_network.FakePort(**{
            'device_id': 'manila-share', 'binding:host_id': 'fake_host'})
        # Duplicate ports returned for the same device/host pair.
        self.mock_object(
            instance.neutron_api, 'list_ports',
            mock.Mock(return_value=[fake_service_port, fake_service_port]))
        self.mock_object(service_instance.socket, 'gethostname',
                         mock.Mock(return_value='fake_host'))
        self.mock_object(instance.neutron_api, 'create_port',
                         mock.Mock(return_value=fake_service_port))
        self.assertRaises(
            exception.ServiceInstanceException, instance._get_service_port,
            instance.service_network_id, None, 'manila-share')
        # No port may be created when the lookup is ambiguous.
        self.assertFalse(instance.neutron_api.create_port.called)
    def test__add_fixed_ips_to_service_port(self):
        """The port gains a fixed IP on every subnet of the service network.

        The existing fixed IP on subnet 1 is kept, and a new allocation
        (no explicit address) is requested on subnet 2.
        """
        ip_address1 = '13.0.0.13'
        subnet_id1 = 'fake_subnet_id1'
        subnet_id2 = 'fake_subnet_id2'
        port = dict(id='fooport', fixed_ips=[dict(
            subnet_id=subnet_id1, ip_address=ip_address1)])
        expected = mock.Mock()
        network = dict(subnets=[subnet_id1, subnet_id2])
        instance = self._init_neutron_network_plugin()
        self.mock_object(instance.neutron_api, 'get_network',
                         mock.Mock(return_value=network))
        self.mock_object(instance.neutron_api, 'update_port_fixed_ips',
                         mock.Mock(return_value=expected))
        result = instance._add_fixed_ips_to_service_port(port)
        self.assertEqual(expected, result)
        instance.neutron_api.get_network.assert_called_once_with(
            instance.service_network_id)
        instance.neutron_api.update_port_fixed_ips.assert_called_once_with(
            port['id'], dict(fixed_ips=[
                dict(subnet_id=subnet_id1, ip_address=ip_address1),
                dict(subnet_id=subnet_id2)]))
    def test__get_private_router_success(self):
        """The router owning the subnet's gateway port is found via ports."""
        instance = self._init_neutron_network_plugin()
        network = fake_network.FakeNetwork()
        subnet = fake_network.FakeSubnet(gateway_ip='fake_ip')
        router = fake_network.FakeRouter(id='fake_router_id')
        # This port holds the subnet's gateway IP; its device_id is the
        # router that the helper is expected to resolve and return.
        port = fake_network.FakePort(fixed_ips=[
            dict(subnet_id=subnet['id'],
                 ip_address=subnet['gateway_ip'])],
            device_id=router['id'])
        self.mock_object(instance.neutron_api, 'get_subnet',
                         mock.Mock(return_value=subnet))
        self.mock_object(instance.neutron_api, 'list_ports',
                         mock.Mock(return_value=[port]))
        self.mock_object(instance.neutron_api, 'show_router',
                         mock.Mock(return_value=router))
        result = instance._get_private_router(network['id'], subnet['id'])
        self.assertEqual(router, result)
        instance.neutron_api.get_subnet.assert_called_once_with(subnet['id'])
        instance.neutron_api.list_ports.assert_called_once_with(
            network_id=network['id'])
        instance.neutron_api.show_router.assert_called_once_with(router['id'])
def test__get_private_router_no_gateway(self):
instance = self._init_neutron_network_plugin()
subnet = fake_network.FakeSubnet(gateway_ip='')
self.mock_object(instance.neutron_api, 'get_subnet',
mock.Mock(return_value=subnet))
self.assertRaises(
exception.ServiceInstanceException,
instance._get_private_router, 'fake_network_id', subnet['id'])
instance.neutron_api.get_subnet.assert_called_once_with(
subnet['id'])
    def test__get_private_router_subnet_is_not_attached_to_the_router(self):
        """No port carries the subnet's gateway IP, so the lookup raises."""
        instance = self._init_neutron_network_plugin()
        network_id = 'fake_network_id'
        subnet = fake_network.FakeSubnet(gateway_ip='fake_ip')
        self.mock_object(instance.neutron_api, 'get_subnet',
                         mock.Mock(return_value=subnet))
        # No ports on the network -> the gateway port cannot be found.
        self.mock_object(instance.neutron_api, 'list_ports',
                         mock.Mock(return_value=[]))
        self.assertRaises(
            exception.ServiceInstanceException,
            instance._get_private_router, network_id, subnet['id'])
        instance.neutron_api.get_subnet.assert_called_once_with(
            subnet['id'])
        instance.neutron_api.list_ports.assert_called_once_with(
            network_id=network_id)
def test__get_service_subnet_none_found(self):
subnet_name = 'fake_subnet_name'
instance = self._init_neutron_network_plugin()
self.mock_object(instance, '_get_all_service_subnets',
mock.Mock(return_value=[]))
result = instance._get_service_subnet(subnet_name)
self.assertIsNone(result)
instance._get_all_service_subnets.assert_called_once_with()
    def test__get_service_subnet_unused_found(self):
        """An unnamed (unused) subnet is claimed by renaming it."""
        subnet_name = 'fake_subnet_name'
        # First subnet has an empty name, marking it as available.
        subnets = [fake_network.FakeSubnet(id='foo', name=''),
                   fake_network.FakeSubnet(id='bar', name='quuz')]
        instance = self._init_neutron_network_plugin()
        self.mock_object(instance.neutron_api, 'update_subnet')
        self.mock_object(instance, '_get_all_service_subnets',
                         mock.Mock(return_value=subnets))
        result = instance._get_service_subnet(subnet_name)
        self.assertEqual(subnets[0], result)
        instance._get_all_service_subnets.assert_called_once_with()
        # Claiming the subnet means renaming it to the requested name.
        instance.neutron_api.update_subnet.assert_called_once_with(
            subnets[0]['id'], subnet_name)
def test__get_service_subnet_one_found(self):
subnet_name = 'fake_subnet_name'
subnets = [fake_network.FakeSubnet(id='foo', name='quuz'),
fake_network.FakeSubnet(id='bar', name=subnet_name)]
instance = self._init_neutron_network_plugin()
self.mock_object(instance, '_get_all_service_subnets',
mock.Mock(return_value=subnets))
result = instance._get_service_subnet(subnet_name)
self.assertEqual(subnets[1], result)
instance._get_all_service_subnets.assert_called_once_with()
def test__get_service_subnet_two_found(self):
subnet_name = 'fake_subnet_name'
subnets = [fake_network.FakeSubnet(id='foo', name=subnet_name),
fake_network.FakeSubnet(id='bar', name=subnet_name)]
instance = self._init_neutron_network_plugin()
self.mock_object(instance, '_get_all_service_subnets',
mock.Mock(return_value=subnets))
self.assertRaises(
exception.ServiceInstanceException,
instance._get_service_subnet, subnet_name)
instance._get_all_service_subnets.assert_called_once_with()
    def test__get_all_service_subnets(self):
        """Every subnet id of the service network is resolved and returned."""
        subnet_id1 = 'fake_subnet_id1'
        subnet_id2 = 'fake_subnet_id2'
        instance = self._init_neutron_network_plugin()
        network = dict(subnets=[subnet_id1, subnet_id2])
        # get_subnet echoes each id back as a dict, one call per subnet.
        self.mock_object(instance.neutron_api, 'get_subnet',
                         mock.Mock(side_effect=lambda s_id: dict(id=s_id)))
        self.mock_object(instance.neutron_api, 'get_network',
                         mock.Mock(return_value=network))
        result = instance._get_all_service_subnets()
        self.assertEqual([dict(id=subnet_id1), dict(id=subnet_id2)], result)
        instance.neutron_api.get_network.assert_called_once_with(
            instance.service_network_id)
        instance.neutron_api.get_subnet.assert_has_calls([
            mock.call(subnet_id1), mock.call(subnet_id2)])
@ddt.ddt
class NovaNetworkHelperTestCase(test.TestCase):
    """Tests Nova network helper for service instance."""

    def setUp(self):
        super(NovaNetworkHelperTestCase, self).setUp()
        self.fake_manager = FakeServiceInstance()

    def test_init(self):
        """The helper exposes the Nova name; teardown/connectivity are no-ops."""
        instance = service_instance.NovaNetworkHelper(self.fake_manager)
        self.assertEqual(service_instance.NOVA_NAME, instance.NAME)
        self.assertIsNone(instance.teardown_network('fake'))
        self.assertIsNone(
            instance.setup_connectivity_with_service_instances())

    def test_get_network_name(self):
        """get_network_name returns the label of the Nova network."""
        network_info = dict(nova_net_id='fake_nova_net_id')
        network = dict(label='fake_network')
        instance = service_instance.NovaNetworkHelper(self.fake_manager)
        self.mock_object(instance.compute_api, 'network_get',
                         mock.Mock(return_value=network))
        result = instance.get_network_name(network_info)
        self.assertEqual(network['label'], result)
        instance.compute_api.network_get.assert_called_once_with(
            instance.admin_context, network_info['nova_net_id'])

    @ddt.data(None, [], {}, '')
    def test_get_network_name_invalid(self, net_name):
        """Falsy nova_net_id values are rejected by get_network_name."""
        network_info = dict(nova_net_id=net_name)
        instance = service_instance.NovaNetworkHelper(self.fake_manager)
        self.assertRaises(
            exception.ManilaException, instance.get_network_name, network_info)

    def test_setup_network(self):
        """setup_network returns the net id plus the NIC specification."""
        network_info = dict(nova_net_id='fake_nova_net_id')
        network = dict(label='fake_network', id='fake_network_id',
                       gateway='fake_gateway_ip')
        instance = service_instance.NovaNetworkHelper(self.fake_manager)
        self.mock_object(instance.compute_api, 'network_get',
                         mock.Mock(return_value=network))
        expected = {
            'nova_net_id': network_info['nova_net_id'],
            'nics': [{'net-id': network['id']}]}
        result = instance.setup_network(network_info)
        self.assertEqual(expected, result)
        instance.compute_api.network_get.assert_called_once_with(
            instance.admin_context, network_info['nova_net_id'])

    @ddt.data(None, [], {}, '')
    def test_setup_network_invalid(self, net_name):
        """Falsy nova_net_id values are rejected by setup_network.

        Bug fix: this test previously called get_network_name (a verbatim
        copy of test_get_network_name_invalid) and therefore never
        exercised setup_network's validation at all.
        """
        network_info = dict(nova_net_id=net_name)
        instance = service_instance.NovaNetworkHelper(self.fake_manager)
        self.assertRaises(
            exception.ManilaException, instance.setup_network, network_info)
| 46.760516
| 79
| 0.658009
| 13,006
| 112,272
| 5.243657
| 0.035522
| 0.052259
| 0.045573
| 0.036496
| 0.855262
| 0.818355
| 0.76768
| 0.723383
| 0.696946
| 0.671447
| 0
| 0.00568
| 0.245751
| 112,272
| 2,400
| 80
| 46.78
| 0.799684
| 0.012862
| 0
| 0.621764
| 0
| 0
| 0.119006
| 0.036024
| 0
| 0
| 0
| 0
| 0.143816
| 1
| 0.065197
| false
| 0.002397
| 0.011026
| 0.005753
| 0.101151
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
864c59e08bd2557de0ede22256930763f97b92d4
| 906
|
py
|
Python
|
atividade02/PGMMatrixInterface.py
|
chinaglia-rafa/PDI
|
d136c9803f88f9bf21a52c420cc2aa5fa93e79b9
|
[
"MIT"
] | null | null | null |
atividade02/PGMMatrixInterface.py
|
chinaglia-rafa/PDI
|
d136c9803f88f9bf21a52c420cc2aa5fa93e79b9
|
[
"MIT"
] | null | null | null |
atividade02/PGMMatrixInterface.py
|
chinaglia-rafa/PDI
|
d136c9803f88f9bf21a52c420cc2aa5fa93e79b9
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
class PGMMatrixInterface(ABC):
    """Abstract interface for PGM (grayscale image) matrix operations."""

    def __init__(self):
        # Bug fix: the original __init__ was declared without `self`,
        # so instantiating any concrete subclass raised TypeError.
        pass

    @abstractmethod
    def init(self, matrix):
        """Initialize the matrix representation."""

    @abstractmethod
    def str(self, matrix):
        """Return a printable representation of the matrix."""

    @abstractmethod
    def set_item(self, matrix, i, j, value, debug=False):
        """Set element (i, j) of the matrix to `value`."""

    @abstractmethod
    def add(self, matrix, added_matrix):
        """Element-wise addition of two matrices."""

    @abstractmethod
    def sub(self, matrix, subtracted_matrix):
        """Element-wise subtraction of two matrices."""

    @abstractmethod
    def noise(self, matrix):
        """Apply noise to the matrix."""

    @abstractmethod
    def lighten(self, matrix, ammount):
        """Lighten the image by `ammount`."""

    @abstractmethod
    def darken(self, matrix, ammount):
        """Darken the image by `ammount`."""

    @abstractmethod
    def black_and_white(self, matrix):
        """Threshold the image to black and white."""

    @abstractmethod
    def invert(self, matrix):
        """Invert pixel values."""

    @abstractmethod
    def decompose(self, matrix, matrix_template):
        """Decompose the matrix according to a template."""
| 17.423077
| 59
| 0.613687
| 93
| 906
| 5.870968
| 0.354839
| 0.362637
| 0.423077
| 0.346154
| 0.423077
| 0.139194
| 0
| 0
| 0
| 0
| 0
| 0
| 0.306843
| 906
| 51
| 60
| 17.764706
| 0.869427
| 0
| 0
| 0.621622
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.324324
| false
| 0.324324
| 0.027027
| 0
| 0.378378
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
869a212503f58edc4405e561201cf665604c0e4a
| 201
|
py
|
Python
|
EnergyIntensityIndicators/Industry/__init__.py
|
NREL/EnergyIntensityIndicators
|
6d5a6d528ecd27b930d82088055224473ba2d63e
|
[
"BSD-3-Clause"
] | 7
|
2020-07-30T15:02:23.000Z
|
2022-01-23T20:02:55.000Z
|
EnergyIntensityIndicators/Industry/__init__.py
|
NREL/EnergyIntensityIndicators
|
6d5a6d528ecd27b930d82088055224473ba2d63e
|
[
"BSD-3-Clause"
] | 36
|
2020-06-18T15:47:32.000Z
|
2021-09-13T21:20:49.000Z
|
EnergyIntensityIndicators/Industry/__init__.py
|
NREL/EnergyIntensityIndicators
|
6d5a6d528ecd27b930d82088055224473ba2d63e
|
[
"BSD-3-Clause"
] | 2
|
2020-06-18T13:30:43.000Z
|
2020-11-17T11:34:10.000Z
|
"""
Industrial Sector Data Module
"""
"""
Industry Sector Data Module
"""
from .manufacturing import Manufacturing
from .nonmanufacuturing import NonManufacturing
from .asm_price_fit import Mfg_prices
| 20.1
| 47
| 0.80597
| 23
| 201
| 6.913043
| 0.652174
| 0.125786
| 0.201258
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119403
| 201
| 10
| 48
| 20.1
| 0.898305
| 0.144279
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
86dc3c677e388b5eaf4633b40bf0f3018bc888fe
| 1,029
|
py
|
Python
|
userpath/core.py
|
ThatXliner/userpath
|
fb29f5a7049d9bc84729d2c2b5a893253e1d8eaa
|
[
"Apache-2.0",
"MIT"
] | 111
|
2017-09-21T02:02:55.000Z
|
2022-02-07T14:53:20.000Z
|
userpath/core.py
|
ThatXliner/userpath
|
fb29f5a7049d9bc84729d2c2b5a893253e1d8eaa
|
[
"Apache-2.0",
"MIT"
] | 30
|
2017-09-21T11:54:03.000Z
|
2022-03-14T15:53:23.000Z
|
userpath/core.py
|
ThatXliner/userpath
|
fb29f5a7049d9bc84729d2c2b5a893253e1d8eaa
|
[
"Apache-2.0",
"MIT"
] | 19
|
2017-09-21T08:59:55.000Z
|
2021-12-25T20:39:31.000Z
|
from .interface import Interface
from .utils import in_current_path
def prepend(location, app_name=None, shells=None, all_shells=False, home=None, check=False):
    """Put *location* at the front of the user PATH via a fresh Interface."""
    shell_interface = Interface(shells=shells, all_shells=all_shells, home=home)
    return shell_interface.put(location, front=True, app_name=app_name, check=check)
def append(location, app_name=None, shells=None, all_shells=False, home=None, check=False):
    """Put *location* at the end of the user PATH via a fresh Interface."""
    shell_interface = Interface(shells=shells, all_shells=all_shells, home=home)
    return shell_interface.put(location, front=False, app_name=app_name, check=check)
def in_new_path(location, shells=None, all_shells=False, home=None, check=False):
    """Report whether *location* is present in the updated (new) PATH."""
    shell_interface = Interface(shells=shells, all_shells=all_shells, home=home)
    return shell_interface.location_in_new_path(location, check=check)
def need_shell_restart(location, shells=None, all_shells=False, home=None):
    """True when *location* is in the new PATH but not the live one yet."""
    # Preserve the original short-circuit: if the location is already in
    # the current PATH, the Interface is never asked about the new PATH.
    if in_current_path(location):
        return False
    shell_interface = Interface(shells=shells, all_shells=all_shells, home=home)
    return shell_interface.location_in_new_path(location)
| 44.73913
| 92
| 0.787172
| 152
| 1,029
| 5.118421
| 0.190789
| 0.138817
| 0.154242
| 0.097686
| 0.812339
| 0.812339
| 0.736504
| 0.667095
| 0.605398
| 0.605398
| 0
| 0
| 0.107872
| 1,029
| 22
| 93
| 46.772727
| 0.847495
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
86fc7ef7da670556d7cdec389fcc24eb61ff198a
| 161
|
py
|
Python
|
portfolio_website/portfolio/admin.py
|
carolwanjohi/personal-portfolio
|
add2e305221c9598873ec38423afc63f0c54624f
|
[
"MIT"
] | null | null | null |
portfolio_website/portfolio/admin.py
|
carolwanjohi/personal-portfolio
|
add2e305221c9598873ec38423afc63f0c54624f
|
[
"MIT"
] | null | null | null |
portfolio_website/portfolio/admin.py
|
carolwanjohi/personal-portfolio
|
add2e305221c9598873ec38423afc63f0c54624f
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Creator, Project
# Register your models here.
# Make the Creator and Project models manageable via the Django admin site.
admin.site.register(Creator)
admin.site.register(Project)
| 16.1
| 36
| 0.795031
| 22
| 161
| 5.818182
| 0.545455
| 0.140625
| 0.265625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124224
| 161
| 9
| 37
| 17.888889
| 0.907801
| 0.161491
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8100074fa37d00bf783ca00bee93a1274d364c84
| 147
|
py
|
Python
|
UnitTesting/Columbus/forms.py
|
FalseG0d/AdvancedDjango
|
52715ffea132e591f98f94b781960fc12a8613e4
|
[
"MIT"
] | 9
|
2020-10-17T14:03:35.000Z
|
2022-01-12T17:51:14.000Z
|
UnitTesting/Columbus/forms.py
|
FalseG0d/AdvancedDjango
|
52715ffea132e591f98f94b781960fc12a8613e4
|
[
"MIT"
] | null | null | null |
UnitTesting/Columbus/forms.py
|
FalseG0d/AdvancedDjango
|
52715ffea132e591f98f94b781960fc12a8613e4
|
[
"MIT"
] | 4
|
2020-10-20T06:52:26.000Z
|
2022-01-07T23:51:59.000Z
|
from .models import Name
from django.forms import ModelForm
class NameForm(ModelForm):
    """ModelForm exposing every field of the Name model."""

    class Meta:
        model = Name
        fields = '__all__'
| 21
| 34
| 0.70068
| 18
| 147
| 5.5
| 0.722222
| 0.282828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.231293
| 147
| 7
| 35
| 21
| 0.876106
| 0
| 0
| 0
| 0
| 0
| 0.047297
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
81286af76139b70180b9936ac20bebe0df06d4d1
| 102
|
py
|
Python
|
tunr/__init__.py
|
Starofall/VMTune
|
68db71b8162b2faab83fd9fdff7a270373b12c2a
|
[
"MIT"
] | 1
|
2019-11-17T22:49:17.000Z
|
2019-11-17T22:49:17.000Z
|
tunr/__init__.py
|
Starofall/VMTune
|
68db71b8162b2faab83fd9fdff7a270373b12c2a
|
[
"MIT"
] | null | null | null |
tunr/__init__.py
|
Starofall/VMTune
|
68db71b8162b2faab83fd9fdff7a270373b12c2a
|
[
"MIT"
] | null | null | null |
from colorama import Fore
def info(any, color=Fore.GREEN):
    # Print the given value wrapped in an ANSI color code (green by
    # default) and reset the terminal color afterwards.
    # NOTE(review): the parameter name `any` shadows the builtin any();
    # renaming it would change the keyword-call interface, so it is
    # documented here rather than changed.
    print(color + str(any) + Fore.RESET)
| 17
| 40
| 0.696078
| 16
| 102
| 4.4375
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 102
| 5
| 41
| 20.4
| 0.845238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8138a88c168a39a9a9c2133efad225c0e9988a07
| 104
|
py
|
Python
|
tags/api/tags/tags_route.py
|
andrequeiroz2/api-tags
|
ccc84c6b1ca5a6f907ea4c36db49a763c7d50445
|
[
"MIT"
] | null | null | null |
tags/api/tags/tags_route.py
|
andrequeiroz2/api-tags
|
ccc84c6b1ca5a6f907ea4c36db49a763c7d50445
|
[
"MIT"
] | null | null | null |
tags/api/tags/tags_route.py
|
andrequeiroz2/api-tags
|
ccc84c6b1ca5a6f907ea4c36db49a763c7d50445
|
[
"MIT"
] | null | null | null |
from .tags_api import TagsApi
def init_tags_api(api):
    """Register the TagsApi resource at /api/users/tags on *api*.

    *api* is expected to offer an `add_resource` method (Flask-RESTful
    style -- presumably; confirm against the app factory).
    """
    api.add_resource(TagsApi, "/api/users/tags")
| 20.8
| 48
| 0.75
| 17
| 104
| 4.352941
| 0.588235
| 0.189189
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 104
| 4
| 49
| 26
| 0.813187
| 0
| 0
| 0
| 0
| 0
| 0.144231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8141609096114c44c0af062774d3a6f41c101bd8
| 229
|
py
|
Python
|
app/api/dependencies/tasks.py
|
umluizlima/email-sender
|
d952874918fc6edc896dabe6d1c1e1391f9d8697
|
[
"MIT"
] | 7
|
2020-05-24T16:49:05.000Z
|
2021-05-03T18:50:00.000Z
|
app/api/dependencies/tasks.py
|
umluizlima/email-sender
|
d952874918fc6edc896dabe6d1c1e1391f9d8697
|
[
"MIT"
] | 1
|
2021-11-07T18:52:54.000Z
|
2021-11-07T18:52:55.000Z
|
app/api/dependencies/tasks.py
|
umluizlima/email-sender
|
d952874918fc6edc896dabe6d1c1e1391f9d8697
|
[
"MIT"
] | 2
|
2020-05-24T16:49:07.000Z
|
2022-02-03T00:57:25.000Z
|
from fastapi import Depends
from app.core.tasks import get_tasks_producer
from app.settings import Settings, get_settings
def tasks_producer(settings: Settings = Depends(get_settings)):
    """FastAPI dependency that builds a tasks producer from app settings."""
    return get_tasks_producer(settings)
| 25.444444
| 63
| 0.825328
| 32
| 229
| 5.6875
| 0.375
| 0.214286
| 0.175824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117904
| 229
| 8
| 64
| 28.625
| 0.90099
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.6
| 0.2
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
d4adb904438e8aafacd920a7354fe64924bb8df0
| 24
|
py
|
Python
|
server/blogsley_flask/post/__init__.py
|
blogsley/blogsley-flask
|
96257fe8195888eba77c25868112810b5374ae9f
|
[
"MIT"
] | 2
|
2020-02-23T05:58:08.000Z
|
2020-05-03T18:11:19.000Z
|
server/blogsley_flask/post/__init__.py
|
blogsley/blogsley-flask
|
96257fe8195888eba77c25868112810b5374ae9f
|
[
"MIT"
] | 4
|
2021-06-08T22:47:07.000Z
|
2022-03-12T00:52:04.000Z
|
server/blogsley_flask/post/__init__.py
|
blogsley/blogsley-flask
|
96257fe8195888eba77c25868112810b5374ae9f
|
[
"MIT"
] | null | null | null |
from .entity import Post
| 24
| 24
| 0.833333
| 4
| 24
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 24
| 1
| 24
| 24
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d4d83621aa02c67144d0433a80f67468032a3b4f
| 78
|
py
|
Python
|
autompc/tuning/__init__.py
|
StochLab/autompc
|
657cf9c6ae6771b65b20fdcbaaadde31150afdff
|
[
"BSD-3-Clause"
] | 22
|
2021-06-05T21:10:46.000Z
|
2022-03-16T08:29:05.000Z
|
autompc/tuning/__init__.py
|
StochLab/autompc
|
657cf9c6ae6771b65b20fdcbaaadde31150afdff
|
[
"BSD-3-Clause"
] | 6
|
2022-02-21T03:10:33.000Z
|
2022-03-22T10:43:35.000Z
|
autompc/tuning/__init__.py
|
StochLab/autompc
|
657cf9c6ae6771b65b20fdcbaaadde31150afdff
|
[
"BSD-3-Clause"
] | 5
|
2021-10-14T16:55:50.000Z
|
2022-03-04T09:38:54.000Z
|
from .pipeline_tuner import PipelineTuner
from .model_tuner import ModelTuner
| 26
| 41
| 0.871795
| 10
| 78
| 6.6
| 0.7
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 78
| 2
| 42
| 39
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
be1de8d83d9064931ea2911c12de96e62a27c454
| 5,622
|
py
|
Python
|
transformers-master/examples/code/oneshot.py
|
VITA-Group/BERT-Tickets
|
4d8e0356939e7045e2f5ee908412a5026051d162
|
[
"MIT"
] | 95
|
2020-08-20T23:24:50.000Z
|
2022-03-03T16:56:51.000Z
|
oneshot.py
|
TAMU-VITA/BERT-Tickets
|
4d8e0356939e7045e2f5ee908412a5026051d162
|
[
"MIT"
] | 6
|
2020-10-07T16:35:29.000Z
|
2022-03-10T03:30:15.000Z
|
transformers-master/examples/oneshot.py
|
harrywuhust2022/BERT-Tickets
|
4d8e0356939e7045e2f5ee908412a5026051d162
|
[
"MIT"
] | 12
|
2020-08-20T23:25:06.000Z
|
2022-01-14T12:06:30.000Z
|
import argparse
from transformers import BertForMaskedLM, BertForSequenceClassification, BertForQuestionAnswering
from transformers import BertConfig
import torch.nn.utils.prune as prune
import numpy as np
import torch
parser = argparse.ArgumentParser(description='PyTorch Cifar10 Training')
parser.add_argument('--weight', default='pre', type=str, help='file_dir')
parser.add_argument('--model', default='glue', type=str, help='file_dir')
parser.add_argument('--rate', default=0.2, type=float, help='rate')
args = parser.parse_args()
def pruning_model(model, px):
    """Globally prune the lowest-magnitude `px` fraction of weights.

    Collects every linear weight in the 12 BERT encoder layers
    (attention q/k/v, attention output, intermediate, layer output)
    plus the pooler, then applies L1-unstructured global pruning.
    """
    targets = []
    for layer_idx in range(12):
        layer = model.bert.encoder.layer[layer_idx]
        targets.extend([
            (layer.attention.self.query, 'weight'),
            (layer.attention.self.key, 'weight'),
            (layer.attention.self.value, 'weight'),
            (layer.attention.output.dense, 'weight'),
            (layer.intermediate.dense, 'weight'),
            (layer.output.dense, 'weight'),
        ])
    targets.append((model.bert.pooler.dense, 'weight'))

    prune.global_unstructured(
        tuple(targets),
        pruning_method=prune.L1Unstructured,
        amount=px,
    )
def see_weight_rate(model):
    """Return the percentage of exactly-zero weights in the pruned model.

    Walks the same modules as pruning_model: the six linear weights of
    each of the 12 encoder layers, plus the pooler.
    """
    total = 0.0
    zeros = 0.0
    for layer_idx in range(12):
        layer = model.bert.encoder.layer[layer_idx]
        for module in (layer.attention.self.query,
                       layer.attention.self.key,
                       layer.attention.self.value,
                       layer.attention.output.dense,
                       layer.intermediate.dense,
                       layer.output.dense):
            total += float(module.weight.nelement())
            zeros += float(torch.sum(module.weight == 0))
    total += float(model.bert.pooler.dense.weight.nelement())
    zeros += float(torch.sum(model.bert.pooler.dense.weight == 0))
    return 100 * zeros / total
config = BertConfig.from_pretrained(
    'bert-base-uncased'
)

# Map the --model choice to the matching transformers head class.
_MODEL_CLASSES = {
    'glue': BertForSequenceClassification,
    'squad': BertForQuestionAnswering,
    'pretrain': BertForMaskedLM,
}


def _prune_and_save(model_cls):
    """Build a model per --weight, prune --rate of it, and export results.

    The original script repeated this entire body verbatim for each of
    the three --model branches; it is factored out here unchanged.
    Saves the pruning masks to <output>/mask.pt and the remaining
    weights to <output>/weight.pt.
    """
    if args.weight == 'rand':
        print('random')
        model = model_cls(config=config)
        output = 'random_prun/'
    elif args.weight == 'pre':
        model = model_cls.from_pretrained(
            'bert-base-uncased',
            from_tf=bool(".ckpt" in 'bert-base-uncased'),
            config=config
        )
        output = 'pretrain_prun/'
    else:
        # Previously an unknown --weight crashed later with NameError on
        # `model`/`output`; fail fast with a clear message instead.
        raise ValueError('unknown --weight value: %r' % args.weight)

    pruning_model(model, args.rate)
    zero = see_weight_rate(model)
    print('zero rate', zero)

    # Split the state dict into pruning masks and raw weights.
    mask_dict = {}
    weight_dict = {}
    model_dict = model.state_dict()
    for key in model_dict.keys():
        if 'mask' in key:
            mask_dict[key] = model_dict[key]
        else:
            weight_dict[key] = model_dict[key]
    torch.save(mask_dict, output + 'mask.pt')
    torch.save(weight_dict, output + 'weight.pt')


if args.model in _MODEL_CLASSES:
    _prune_and_save(_MODEL_CLASSES[args.model])
| 34.072727
| 109
| 0.66471
| 730
| 5,622
| 4.954795
| 0.132877
| 0.052253
| 0.079624
| 0.104507
| 0.797346
| 0.765828
| 0.765828
| 0.739287
| 0.709981
| 0.683992
| 0
| 0.004661
| 0.198684
| 5,622
| 164
| 110
| 34.280488
| 0.798224
| 0
| 0
| 0.524194
| 0
| 0
| 0.083437
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016129
| false
| 0
| 0.048387
| 0
| 0.072581
| 0.048387
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
078254c9916a6fb9dbfec95c12de8f28e37ca4cf
| 158
|
py
|
Python
|
book_1/branching.py
|
D-Mbithi/Real-Python-Course-Solutions
|
6e743af5f9f40260df8d42b667b3535caed9db3b
|
[
"MIT"
] | 1
|
2019-10-24T17:56:23.000Z
|
2019-10-24T17:56:23.000Z
|
book_1/branching.py
|
D-Mbithi/Real-Python-Course-Solutions
|
6e743af5f9f40260df8d42b667b3535caed9db3b
|
[
"MIT"
] | null | null | null |
book_1/branching.py
|
D-Mbithi/Real-Python-Course-Solutions
|
6e743af5f9f40260df8d42b667b3535caed9db3b
|
[
"MIT"
] | null | null | null |
num = 10

# Classify num relative to 10, then report the result.
if num < 10:
    verdict = "number is less than 10"
elif num > 10:
    verdict = "number is greater than 10"
else:
    verdict = "the number is equal to 10"
print(verdict)
| 17.555556
| 38
| 0.626582
| 28
| 158
| 3.535714
| 0.5
| 0.151515
| 0.20202
| 0.323232
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 0.259494
| 158
| 8
| 39
| 19.75
| 0.74359
| 0
| 0
| 0
| 0
| 0
| 0.455696
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.428571
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
0786fd8c4e5cbc21a407156bcdc83ce19b2750fd
| 250
|
py
|
Python
|
grow/extensions/hooks/dev_handler_hook_test.py
|
akashkalal/grow
|
e4813efecb270e00c52c4bb1cb317766a8c92e29
|
[
"MIT"
] | 335
|
2016-04-02T20:12:21.000Z
|
2022-03-28T18:55:26.000Z
|
grow/extensions/hooks/dev_handler_hook_test.py
|
kmcnellis/grow
|
4787f5a01681ef0800e9b4388a56cdbc48209368
|
[
"MIT"
] | 784
|
2016-04-01T16:56:41.000Z
|
2022-03-05T01:25:34.000Z
|
grow/extensions/hooks/dev_handler_hook_test.py
|
kmcnellis/grow
|
4787f5a01681ef0800e9b4388a56cdbc48209368
|
[
"MIT"
] | 54
|
2016-05-03T13:06:15.000Z
|
2021-09-24T04:46:23.000Z
|
"""Tests for dev handler hook."""
import unittest
from grow.extensions.hooks import dev_handler_hook
class DevHandlerHookTestCase(unittest.TestCase):
    """Test the dev handler hook."""

    def test_something(self):
        """Placeholder test so the suite runs; replace with real assertions
        against `dev_handler_hook` once its behavior is specified."""
        pass
| 19.230769
| 50
| 0.68
| 29
| 250
| 5.758621
| 0.689655
| 0.179641
| 0.251497
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 250
| 12
| 51
| 20.833333
| 0.835
| 0.224
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
078d3b92d7b8a75ff717c0523c0a22b73cae04d1
| 16,482
|
py
|
Python
|
src/discipleClass.py
|
TestowanieAutomatyczneUG/projekt-i-Darkstaron123
|
fe8c1e74eb73267ebb985bd030714250bb7adf67
|
[
"MIT"
] | null | null | null |
src/discipleClass.py
|
TestowanieAutomatyczneUG/projekt-i-Darkstaron123
|
fe8c1e74eb73267ebb985bd030714250bb7adf67
|
[
"MIT"
] | null | null | null |
src/discipleClass.py
|
TestowanieAutomatyczneUG/projekt-i-Darkstaron123
|
fe8c1e74eb73267ebb985bd030714250bb7adf67
|
[
"MIT"
] | null | null | null |
class DiscipleClass:
    """Console CRUD operations for "disciples" stored as JSON in ../data/data.txt.

    All interaction is via print()/input(); every operation is duplicated in an
    English ("EN") branch and a Polish ("PL") branch.  Menu navigation is done
    by returning MenuClass().menu(language) or by recursing into the same
    method, so these methods do not return data to callers.
    """
    def displayAllDisciples(self,language):
        """Print id, first name and last name of every disciple.

        language: "EN" or "PL" selects the message language; any other value
        prints nothing.  File/JSON errors are silently swallowed.
        """
        import json
        if (language == "EN"):
            try:
                with open('../data/data.txt') as json_file:
                    data = json.load(json_file)
                    for i in data['disciples']:
                        print('Id: ' + i['id'], end=" ")
                        print('First Name: ' + i['firstname'], end=" ")
                        print('Last Name: ' + i['lastname'], end=" ")
                        print('')
            # NOTE(review): bare except hides I/O and JSON errors entirely.
            except:
                pass
        if (language == "PL"):
            try:
                with open('../data/data.txt') as json_file:
                    data = json.load(json_file)
                    for i in data['disciples']:
                        print('Id: ' + i['id'], end=" ")
                        print('Pierwsze Imie: ' + i['firstname'], end=" ")
                        print('Nazwisko: ' + i['lastname'], end=" ")
                        print('')
            except:
                pass
    def chooseAndDisplayDisciple(self,language,choose=None):
        """Prompt for a disciple id, print full details (subjects, marks,
        per-subject averages, overall average, notices), then return to menu.

        NOTE(review): the `choose` parameter is never used as an input — it is
        unconditionally overwritten by the interactive input() call below.
        """
        import json
        from discipleClass import DiscipleClass
        from menuClass import MenuClass
        if (language == "EN"):
            print("Choose disciple by typing in his Id from list below.")
            DiscipleClass().displayAllDisciples(language)
            choose = str(input())
            with open('../data/data.txt') as json_file:
                data = json.load(json_file)
                try:
                    # Validation by indexing: raises if id is not a valid index.
                    data['disciples'][int(choose)]
                except:
                    print("No disciple with provided id.")
                    return MenuClass().menu(language)
                # print(data['disciples'][int(choose)])
                print("Id:" + data['disciples'][int(choose)]['id'])
                print("First Name:" + data['disciples'][int(choose)]['firstname'])
                print("Last Name:" + data['disciples'][int(choose)]['lastname'])
                print("Subjects:")
                if(len(data['disciples'][int(choose)]['subjects'])>0):
                    for i in data['disciples'][int(choose)]['subjects']:
                        print('    ' + i['name'])
                        print('        Marks:')
                        if (len(i['marks'])> 0):
                            for ii in i['marks']:
                                print('            ' + ii, end=", ")
                        print('Average from marks of this subject:', end="")
                        print(str(DiscipleClass().calculateMarkAverageFromSubject(i['marks'])))
                        print('')
                print("Average from averages of all subjects of this disciple:", end="")
                print(str(DiscipleClass().calculateMarkAverageFromDisciple(data['disciples'][int(choose)])))
                print("Notices(Quantity: "+str(DiscipleClass().countNumberOfNotices(data['disciples'][int(choose)]['notices']))+"):")
                for i in data['disciples'][int(choose)]['notices']:
                    print(i)
                print("Type in anything to return to menu")
                input()
                return MenuClass().menu(language)
        if (language == "PL"):
            print("Wybierz ucznia poprzez wpisanie jego Id z listy ponizej.")
            DiscipleClass().displayAllDisciples(language)
            choose = str(input())
            with open('../data/data.txt') as json_file:
                data = json.load(json_file)
                try:
                    data['disciples'][int(choose)]
                except:
                    print("Brak ucznia o podanym id.")
                    return MenuClass().menu(language)
                # print(data['disciples'][int(choose)])
                print("Id:" + data['disciples'][int(choose)]['id'])
                print("Pierwsze Imie:" + data['disciples'][int(choose)]['firstname'])
                print("Nazwisko:" + data['disciples'][int(choose)]['lastname'])
                print("Przedmioty:")
                if (len(data['disciples'][int(choose)]['subjects']) > 0):
                    for i in data['disciples'][int(choose)]['subjects']:
                        print('    ' + i['name'])
                        print('        Oceny:')
                        if (len(i['marks'])> 0):
                            for ii in i['marks']:
                                print('            ' + ii, end=", ")
                        print('Srednia z ocen tego przedmiotu:', end="")
                        print(str(DiscipleClass().calculateMarkAverageFromSubject(i['marks'])))
                        print('')
                print("Srednia z ocen wszystkich przedmiotow tego ucznia:",end="")
                print(str(DiscipleClass().calculateMarkAverageFromDisciple(data['disciples'][int(choose)])))
                print("Uwagi(Ilosc: "+str(DiscipleClass().countNumberOfNotices(data['disciples'][int(choose)]['notices']))+"):")
                for i in data['disciples'][int(choose)]['notices']:
                    print(i)
                print("Wprowadz cokolwiek zeby wrocic do menu.")
                input()
                return MenuClass().menu(language)
    def addDisciple(self,language):
        """Interactively create a new disciple (first name, last name) and
        append it to the JSON file; the new id is the current list length."""
        import json
        from menuClass import MenuClass
        if (language == "EN"):
            print("You entered process of adding disciple.")
            print("Type in his first name.")
            firstname = str(input())
            print("Type in his last name.")
            lastname = str(input())
            with open('../data/data.txt') as json_file:
                data = json.load(json_file)
            with open('../data/data.txt', 'w') as outfile:
                data['disciples'].append(
                    {
                        "id": str(len(data['disciples'])),
                        "firstname": firstname,
                        "lastname": lastname,
                        "subjects": [],
                        "notices":[],
                    }
                )
                json.dump(data, outfile)
            return MenuClass().menu(language)
        if (language == "PL"):
            print("Weszles w proces dodawania ucznia.")
            print("Wpisz jego pierwsze imie.")
            firstname = str(input())
            print("Wpisz jego nazwisko.")
            lastname = str(input())
            with open('../data/data.txt') as json_file:
                data = json.load(json_file)
            with open('../data/data.txt', 'w') as outfile:
                # NOTE(review): unlike the EN branch, no "notices" key is added
                # here; PL-created disciples will lack it and notice-related
                # code (e.g. editDisciple option 6) will raise KeyError —
                # confirm and unify with the EN branch.
                data['disciples'].append(
                    {
                        "id": str(len(data['disciples'])),
                        "firstname": firstname,
                        "lastname": lastname,
                        "subjects": []
                    }
                )
                json.dump(data, outfile)
            return MenuClass().menu(language)
    def editDisciple(self,language):
        """Interactive edit menu for one disciple: rename, manage subjects via
        SubjectClass, or add a notice; re-enters itself after each edit.

        The whole body is wrapped in try/except, so any error (including bad
        input deep inside) silently returns to the main menu.
        """
        import json
        from menuClass import MenuClass
        from discipleClass import DiscipleClass
        from subjectClass import SubjectClass
        try:
            if (language == "EN"):
                print("You entered process of editing disciple. Choose disciple by typing in his Id from list below.")
                DiscipleClass().displayAllDisciples(language)
                choose = str(input())
                with open('../data/data.txt') as json_file:
                    data = json.load(json_file)
                    try:
                        data['disciples'][int(choose)]
                    except:
                        print("No disciple with provided id.")
                        return MenuClass().menu(language)
                    disciple = data['disciples'][int(choose)]
                    print("=>Choosen disciple.<=")
                    print("Id:" + disciple['id'])
                    print("First Name:" + disciple['firstname'])
                    print("Last Name:" + disciple['lastname'])
                    print("Subjects:")
                    for i in disciple['subjects']:
                        print('    ' + i['name'])
                        print('        Marks:')
                        for ii in i['marks']:
                            print('            ' + ii, end=", ")
                        print('')
                    print('Pick an option.')
                    print("0. Go back to menu.")
                    print("1. Type in disciple\'s new first name.")
                    print("2. Type in disciple\'s new last name.")
                    print("3. Add subject to disciple.")
                    print("4. Edit disciple\'s subject.")
                    print("5. Remove disciple\'s subject.")
                    print("6. Add notice to disciple.")
                    choose = str(input())
                    if (choose == "0"):
                        return MenuClass().menu(language)
                    elif (choose == "1"):
                        disciple['firstname'] = str(input())
                    elif (choose == "2"):
                        disciple['lastname'] = str(input())
                    elif (choose == "3"):
                        SubjectClass().addSubject(language, disciple['id'])
                    elif (choose == "4"):
                        SubjectClass().editSubject(language, disciple['id'])
                    elif (choose == "5"):
                        SubjectClass().removeSubject(language, disciple['id'])
                    elif (choose == "6"):
                        print("Type in notice.")
                        notice = str(input())
                        disciple['notices'].append(str(notice))
                    else:
                        print('You had a typo. Try again!')
                        return DiscipleClass().editDisciple(language)
                    with open('../data/data.txt', 'w') as outfile:
                        data['disciples'][int(disciple['id'])] = disciple
                        json.dump(data, outfile)
                    # Recurse to show the edit menu again after saving.
                    return DiscipleClass().editDisciple(language)
            if (language == "PL"):
                print("Weszles w proces edytowania ucznia. Wybierz ucznia poprzez wpisanie jego Id z listy ponizej.")
                DiscipleClass().displayAllDisciples(language)
                choose = str(input())
                with open('../data/data.txt') as json_file:
                    data = json.load(json_file)
                    try:
                        # NOTE(review): the EN branch validates
                        # data['disciples'][int(choose)]; this only touches the
                        # key, so an out-of-range id is not caught here (the
                        # outer broad except catches it later instead).
                        data['disciples']
                    except:
                        print("Brak ucznia o podanym id.")
                        return MenuClass().menu(language)
                    disciple = data['disciples'][int(choose)]
                    print("=>Wybrany uczen.<=")
                    print("Id:" + disciple['id'])
                    print("Imie:" + disciple['firstname'])
                    print("Nazwisko:" + disciple['lastname'])
                    print("Przedmioty:")
                    for i in disciple['subjects']:
                        print('    ' + i['name'])
                        print('        Oceny:')
                        for ii in i['marks']:
                            print('            ' + ii, end=", ")
                        print('')
                    print('Wybierz opcje.')
                    print("0. Wroc do menu.")
                    print("1. Wpisz nowe imie dla wybranego ucznia.")
                    print("2. Wpisz nowe nazwisko dla wybranego ucznia.")
                    print("3. Dodaj przedmiot do wybranego ucznia.")
                    print("4. Edytuj przedmiot wybranego ucznia.")
                    print("5. Usun przedmiot wybranego ucznia.")
                    print("6. Dodaj uwage do ucznia.")
                    choose = str(input())
                    if (choose == "0"):
                        return MenuClass().menu(language)
                    elif (choose == "1"):
                        disciple['firstname'] = str(input())
                    elif (choose == "2"):
                        disciple['lastname'] = str(input())
                    elif (choose == "3"):
                        SubjectClass().addSubject(language, disciple['id'])
                    elif (choose == "4"):
                        SubjectClass().editSubject(language, disciple['id'])
                    elif (choose == "5"):
                        SubjectClass().removeSubject(language, disciple['id'])
                    elif (choose == "6"):
                        print("Wpisz uwage.")
                        notice = str(input())
                        disciple['notices'].append(str(notice))
                    else:
                        print('Miales literowke. Sproboj ponownie!')
                        return DiscipleClass().editDisciple(language)
                    with open('../data/data.txt', 'w') as outfile:
                        data['disciples'][int(disciple['id'])] = disciple
                        json.dump(data, outfile)
                    return DiscipleClass().editDisciple(language)
        except:
            return MenuClass().menu(language)
    def removeDisciple(self,language):
        """Interactively delete a disciple by id, then renumber the remaining
        disciples' ids sequentially from 0 and rewrite the JSON file.

        Raises Exception("Wrong Input.") on any error while reading/writing
        (including a non-numeric id).
        """
        import json
        from menuClass import MenuClass
        from discipleClass import DiscipleClass
        if (language == "EN"):
            print("You entered process of removing disciple. Choose disciple by typing in his Id from list below.")
            DiscipleClass().displayAllDisciples(language)
            choose = str(input())
            try:
                with open('../data/data.txt') as json_file:
                    data = json.load(json_file)
                    if (len(data['disciples']) > int(choose) and int(choose) >= 0):
                        with open('../data/data.txt', 'w') as outfile:
                            del data['disciples'][int(choose)]
                            number = 0 # reassigning id after deletion
                            for i in data['disciples']:
                                i['id'] = str(number)
                                number = number + 1
                            json.dump(data, outfile)
                    else:
                        return MenuClass().menu(language)
            except:
                raise Exception("Wrong Input.")
            return MenuClass().menu(language)
        if (language == "PL"):
            print("Weszles w proces usuwania ucznia. Wybierz ucznia poprzez wpisanie jego Id z listy ponizej,")
            DiscipleClass().displayAllDisciples(language)
            choose=str(input())
            try:
                with open('../data/data.txt') as json_file:
                    data = json.load(json_file)
                    if (len(data['disciples']) > int(choose) and int(choose) >= 0):
                        with open('../data/data.txt', 'w') as outfile:
                            del data['disciples'][int(choose)]
                            number = 0 # reassigning id after deletion
                            for i in data['disciples']:
                                i['id'] = str(number)
                                number = number + 1
                            json.dump(data, outfile)
                    else:
                        return MenuClass().menu(language)
            except:
                raise Exception("Wrong Input.")
            return MenuClass().menu(language)
    def calculateMarkAverageFromDisciple(self,disciple):
        """Return the mean of the per-subject mark averages for a disciple
        dict, or None (implicitly) when the disciple has no subjects.

        NOTE(review): if subjects exist but none has any marks,
        quantitySubject stays 0 and the final division raises
        ZeroDivisionError — confirm whether that state can occur.
        """
        if (len(disciple['subjects']) > 0):
            sumSubject = 0
            quantitySubject = 0
            for i in disciple['subjects']:
                if (len(i['marks']) > 0):
                    sumMark = 0
                    quantityMark = 0
                    for ii in i['marks']:
                        sumMark = sumMark + int(ii)
                        quantityMark = quantityMark + 1
                    averageMark = float(sumMark) / float(quantityMark)
                    sumSubject = sumSubject + averageMark
                    quantitySubject = quantitySubject + 1
            averageSubject = float(sumSubject) / float(quantitySubject)
            return averageSubject
    def calculateMarkAverageFromSubject(self,marks):
        """Return the arithmetic mean of a list of marks (strings convertible
        to int), or None (implicitly) when the list is empty."""
        if (len(marks) > 0):
            sumMark = 0
            quantityMark = 0
            for ii in marks:
                sumMark = sumMark + int(ii)
                quantityMark = quantityMark + 1
            averageMark = float(sumMark) / float(quantityMark)
            return averageMark
    def countNumberOfNotices(self,notices):
        """Return the number of notices (simply len of the list)."""
        return len(notices)
| 48.763314
| 133
| 0.45668
| 1,420
| 16,482
| 5.28662
| 0.125352
| 0.065805
| 0.061809
| 0.079126
| 0.769548
| 0.744372
| 0.709338
| 0.709338
| 0.669508
| 0.62515
| 0
| 0.005218
| 0.418639
| 16,482
| 337
| 134
| 48.908012
| 0.77823
| 0.008191
| 0
| 0.739264
| 0
| 0
| 0.17808
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02454
| false
| 0.006135
| 0.039877
| 0.003067
| 0.134969
| 0.282209
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
07c606bc582b49dfc0af1a39d01c419d014d2805
| 9,588
|
py
|
Python
|
coba/tests/test_environments_core.py
|
VowpalWabbit/coba
|
f3ba37280ea6125dc334a501ba39b3d30696ef4b
|
[
"BSD-3-Clause"
] | 30
|
2020-08-06T22:17:34.000Z
|
2022-03-15T12:20:20.000Z
|
coba/tests/test_environments_core.py
|
VowpalWabbit/coba
|
f3ba37280ea6125dc334a501ba39b3d30696ef4b
|
[
"BSD-3-Clause"
] | 5
|
2021-02-25T02:06:22.000Z
|
2022-01-11T14:18:34.000Z
|
coba/tests/test_environments_core.py
|
VowpalWabbit/coba
|
f3ba37280ea6125dc334a501ba39b3d30696ef4b
|
[
"BSD-3-Clause"
] | 9
|
2020-11-25T19:55:44.000Z
|
2021-10-01T20:20:36.000Z
|
import unittest
import unittest.mock
from pathlib import Path
from coba.pipes import DiskIO
from coba.environments import Environments, Environment, Shuffle, Take
class TestEnvironment(Environment):
    """Minimal stub Environment used as a fixture: exposes a single 'id'
    param and yields no interactions from read()."""
    def __init__(self, id) -> None:
        self._id = id
    @property
    def params(self):
        # Identifies the stub in assertions on `params['id']`.
        return {'id':self._id}
    def read(self):
        return []
class Environments_Tests(unittest.TestCase):
    """Unit tests for the Environments container: file/synthetic/openml
    factories, construction, iteration, addition, str/ipython display, and
    the filter helpers (binary/shuffle/sparse/take/reservoir/filter)."""
    def test_from_file_path(self):
        if Path("coba/tests/.temp/from_file.env").exists():
            Path("coba/tests/.temp/from_file.env").unlink()
        try:
            Path("coba/tests/.temp/from_file.env").write_text('{ "environments" : { "OpenmlSimulation": 150 } }')
            env = Environments.from_file("coba/tests/.temp/from_file.env")
            self.assertEqual(1    , len(env))
            self.assertEqual(150  , env[0].params['openml'])
            self.assertEqual(False, env[0].params['cat_as_str'])
        finally:
            if Path("coba/tests/.temp/from_file.env").exists():
                Path("coba/tests/.temp/from_file.env").unlink()
    def test_from_file_source(self):
        if Path("coba/tests/.temp/from_file.env").exists():
            Path("coba/tests/.temp/from_file.env").unlink()
        try:
            Path("coba/tests/.temp/from_file.env").write_text('{ "environments" : { "OpenmlSimulation": 150 } }')
            env = Environments.from_file(DiskIO("coba/tests/.temp/from_file.env"))
            self.assertEqual(1    , len(env))
            self.assertEqual(150  , env[0].params['openml'])
            self.assertEqual(False, env[0].params['cat_as_str'])
        finally:
            if Path("coba/tests/.temp/from_file.env").exists():
                Path("coba/tests/.temp/from_file.env").unlink()
    def test_from_linear_synthetic(self):
        env = Environments.from_linear_synthetic(100,2,3,3,0,["xa"],2)
        self.assertEqual(1     , len(env))
        self.assertEqual(100   , len(list(env[0].read())))
        self.assertEqual(2     , env[0].params['n_A'])
        self.assertEqual(3     , env[0].params['n_C_phi'])
        self.assertEqual(3     , env[0].params['n_A_phi'])
        self.assertEqual(0     , env[0].params['r_noise'])
        self.assertEqual(['xa'], env[0].params['X'])
        self.assertEqual(2     , env[0].params['seed'])
    def test_from_local_synthetic(self):
        env = Environments.from_local_synthetic(100,2,1,10,2)
        self.assertEqual(1  , len(env))
        self.assertEqual(100, len(list(env[0].read())))
        self.assertEqual(2  , env[0].params['n_A'])
        self.assertEqual(10 , env[0].params['n_C'])
        self.assertEqual(1  , env[0].params['n_C_phi'])
        self.assertEqual(2  , env[0].params['seed'])
    def test_from_openml_single(self):
        env = Environments.from_openml(100,100,True,'regression')
        self.assertEqual(1           , len(env))
        self.assertEqual(100         , env[0].params['openml'])
        self.assertEqual(True        , env[0].params['cat_as_str'])
        self.assertEqual('regression', env[0].params['openml_type'])
        self.assertEqual(100         , env[0].params['openml_take'])
    def test_from_openml_multi(self):
        env = Environments.from_openml([100,200],100,True,'regression')
        self.assertEqual(2           , len(env))
        self.assertEqual(100         , env[0].params['openml'])
        self.assertEqual(True        , env[0].params['cat_as_str'])
        self.assertEqual('regression', env[0].params['openml_type'])
        self.assertEqual(100         , env[0].params['openml_take'])
        self.assertEqual(200         , env[1].params['openml'])
        self.assertEqual(True        , env[1].params['cat_as_str'])
        self.assertEqual('regression', env[1].params['openml_type'])
        self.assertEqual(100         , env[1].params['openml_take'])
    def test_init_args(self):
        env = Environments(TestEnvironment('A'), TestEnvironment('B'))
        self.assertEqual(2  , len(env))
        self.assertEqual('A', env[0].params['id'])
        self.assertEqual('B', env[1].params['id'])
    def test_init_sequence_args(self):
        env = Environments([TestEnvironment('A'), TestEnvironment('B')])
        self.assertEqual(2  , len(env))
        self.assertEqual('A', env[0].params['id'])
        self.assertEqual('B', env[1].params['id'])
    def test_init_empty_args(self):
        env = Environments()
        self.assertEqual(0 , len(env))
    def test_iter(self):
        envs = Environments([TestEnvironment('A'), TestEnvironment('B')])
        for env,id in zip(envs,['A','B']):
            self.assertEqual(id, env.params['id'])
    def test_add(self):
        envs_1 = Environments(TestEnvironment('A'))
        envs_2 = Environments(TestEnvironment('B'))
        envs_3 = envs_1+envs_2
        self.assertEqual(1  , len(envs_1))
        self.assertEqual('A', envs_1[0].params['id'])
        self.assertEqual(1  , len(envs_2))
        self.assertEqual('B', envs_2[0].params['id'])
        self.assertEqual(2  , len(envs_3))
        self.assertEqual('A', envs_3[0].params['id'])
        self.assertEqual('B', envs_3[1].params['id'])
    def test_str(self):
        envs = Environments(TestEnvironment('A'),TestEnvironment('B'))
        self.assertEqual(str(envs), f'1. {envs[0]}\n2. {envs[1]}')
    def test_binary(self):
        envs = Environments(TestEnvironment('A'),TestEnvironment('B')).binary()
        self.assertEqual(2   , len(envs))
        self.assertEqual('A' , envs[0].params['id'])
        self.assertEqual(True, envs[0].params['binary'])
        self.assertEqual('B' , envs[1].params['id'])
        self.assertEqual(True, envs[1].params['binary'])
    def test_shuffle(self):
        envs = Environments(TestEnvironment('A'),TestEnvironment('B')).shuffle([1,2])
        self.assertEqual(4  , len(envs))
        self.assertEqual('A', envs[0].params['id'])
        self.assertEqual(1  , envs[0].params['shuffle'])
        self.assertEqual('A', envs[1].params['id'])
        self.assertEqual(2  , envs[1].params['shuffle'])
        self.assertEqual('B', envs[2].params['id'])
        self.assertEqual(1  , envs[2].params['shuffle'])
        self.assertEqual('B', envs[3].params['id'])
        self.assertEqual(2  , envs[3].params['shuffle'])
    def test_sparse(self):
        envs = Environments(TestEnvironment('A'),TestEnvironment('B')).sparse(False,True)
        self.assertEqual(2    , len(envs))
        self.assertEqual('A'  , envs[0].params['id'])
        self.assertEqual(False, envs[0].params['sparse_C'])
        self.assertEqual(True , envs[0].params['sparse_A'])
        self.assertEqual('B'  , envs[1].params['id'])
        self.assertEqual(False, envs[1].params['sparse_C'])
        self.assertEqual(True, envs[1].params['sparse_A'])
    def test_take(self):
        envs = Environments(TestEnvironment('A'),TestEnvironment('B')).take(1)
        self.assertEqual(2  , len(envs))
        self.assertEqual('A', envs[0].params['id'])
        self.assertEqual(1  , envs[0].params['take'])
        self.assertEqual('B', envs[1].params['id'])
        self.assertEqual(1  , envs[1].params['take'])
    def test_reservoir(self):
        envs = Environments(TestEnvironment('A'),TestEnvironment('B')).reservoir(1,2)
        self.assertEqual(2  , len(envs))
        self.assertEqual('A', envs[0].params['id'])
        self.assertEqual(1  , envs[0].params['reservoir_count'])
        self.assertEqual(2  , envs[0].params['reservoir_seed'])
        self.assertEqual('B', envs[1].params['id'])
        self.assertEqual(1  , envs[1].params['reservoir_count'])
        self.assertEqual(2  , envs[1].params['reservoir_seed'])
    def test_singular_filter(self):
        envs = Environments(TestEnvironment('A'),TestEnvironment('B')).filter(Shuffle(1))
        self.assertEqual(2  , len(envs))
        self.assertEqual('A', envs[0].params['id'])
        self.assertEqual(1  , envs[0].params['shuffle'])
        self.assertEqual('B', envs[1].params['id'])
        self.assertEqual(1  , envs[1].params['shuffle'])
    def test_sequence_filter(self):
        envs = Environments(TestEnvironment('A'),TestEnvironment('B')).filter([Shuffle(1),Shuffle(2)])
        self.assertEqual(4  , len(envs))
        self.assertEqual('A', envs[0].params['id'])
        self.assertEqual(1  , envs[0].params['shuffle'])
        self.assertEqual('A', envs[1].params['id'])
        self.assertEqual(2  , envs[1].params['shuffle'])
        self.assertEqual('B', envs[2].params['id'])
        self.assertEqual(1  , envs[2].params['shuffle'])
        self.assertEqual('B', envs[3].params['id'])
        self.assertEqual(2  , envs[3].params['shuffle'])
    def test_two_step_filter(self):
        envs = Environments(TestEnvironment('A'),TestEnvironment('B')).filter(Shuffle(1)).filter(Take(1))
        self.assertEqual(2  , len(envs))
        self.assertEqual('A', envs[0].params['id'])
        self.assertEqual(1  , envs[0].params['shuffle'])
        self.assertEqual(1  , envs[0].params['take'])
        self.assertEqual('B', envs[1].params['id'])
        self.assertEqual(1  , envs[1].params['shuffle'])
        self.assertEqual(1  , envs[1].params['take'])
    def test_ipython_display(self):
        with unittest.mock.patch("builtins.print") as mock:
            envs = Environments(TestEnvironment('A'),TestEnvironment('B'))
            envs._ipython_display_()
            mock.assert_called_once_with(f'1. {envs[0]}\n2. {envs[1]}')
# Run this test module's suite when executed directly.
if __name__ == '__main__':
    unittest.main()
| 40.8
| 113
| 0.601481
| 1,204
| 9,588
| 4.680233
| 0.086379
| 0.276841
| 0.053239
| 0.102041
| 0.807453
| 0.760071
| 0.713043
| 0.604259
| 0.58598
| 0.561668
| 0
| 0.030631
| 0.220275
| 9,588
| 235
| 114
| 40.8
| 0.723114
| 0
| 0
| 0.494505
| 0
| 0
| 0.114298
| 0.037543
| 0
| 0
| 0
| 0
| 0.576923
| 1
| 0.131868
| false
| 0
| 0.027473
| 0.010989
| 0.181319
| 0.005495
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
07d4d3a9b7e00ad62ad2ebb073aa41828cf5cd27
| 18,564
|
py
|
Python
|
tests/fields/test_lazy_reference_field.py
|
shellcodesniper/mongoengine
|
d76cb345be98045cde0fa078569cc8021c0d0162
|
[
"MIT"
] | 3
|
2019-06-18T07:54:38.000Z
|
2022-01-22T23:27:41.000Z
|
tests/fields/test_lazy_reference_field.py
|
shellcodesniper/mongoengine
|
d76cb345be98045cde0fa078569cc8021c0d0162
|
[
"MIT"
] | 1
|
2022-01-22T23:27:23.000Z
|
2022-01-22T23:27:23.000Z
|
tests/fields/test_lazy_reference_field.py
|
shellcodesniper/mongoengine
|
d76cb345be98045cde0fa078569cc8021c0d0162
|
[
"MIT"
] | null | null | null |
from bson import DBRef, ObjectId
import pytest
from mongoengine import *
from mongoengine.base import LazyReference
from tests.utils import MongoDBTestCase
class TestLazyReferenceField(MongoDBTestCase):
    """Tests for LazyReferenceField: repr, attribute access on the lazy proxy,
    fetch() caching and force refetch, accepted/rejected assignment values,
    query conversion (with and without dbref), passthrough mode, unset fields,
    equality with the referenced document, and embedded/list usage."""
    def test_lazy_reference_config(self):
        # Make sure ReferenceField only accepts a document class or a string
        # with a document class name.
        with pytest.raises(ValidationError):
            LazyReferenceField(EmbeddedDocument)
    def test___repr__(self):
        class Animal(Document):
            pass
        class Ocurrence(Document):
            animal = LazyReferenceField(Animal)
        Animal.drop_collection()
        Ocurrence.drop_collection()
        animal = Animal()
        oc = Ocurrence(animal=animal)
        assert "LazyReference" in repr(oc.animal)
    def test___getattr___unknown_attr_raises_attribute_error(self):
        class Animal(Document):
            pass
        class Ocurrence(Document):
            animal = LazyReferenceField(Animal)
        Animal.drop_collection()
        Ocurrence.drop_collection()
        animal = Animal().save()
        oc = Ocurrence(animal=animal)
        with pytest.raises(AttributeError):
            oc.animal.not_exist
    def test_lazy_reference_simple(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()
        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)
        Animal.drop_collection()
        Ocurrence.drop_collection()
        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        p = Ocurrence.objects.get()
        assert isinstance(p.animal, LazyReference)
        fetched_animal = p.animal.fetch()
        assert fetched_animal == animal
        # `fetch` keep cache on referenced document by default...
        animal.tag = "not so heavy"
        animal.save()
        double_fetch = p.animal.fetch()
        assert fetched_animal is double_fetch
        assert double_fetch.tag == "heavy"
        # ...unless specified otherwise
        fetch_force = p.animal.fetch(force=True)
        assert fetch_force is not fetched_animal
        assert fetch_force.tag == "not so heavy"
    def test_lazy_reference_fetch_invalid_ref(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()
        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)
        Animal.drop_collection()
        Ocurrence.drop_collection()
        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        animal.delete()
        p = Ocurrence.objects.get()
        assert isinstance(p.animal, LazyReference)
        with pytest.raises(DoesNotExist):
            p.animal.fetch()
    def test_lazy_reference_set(self):
        class Animal(Document):
            meta = {"allow_inheritance": True}
            name = StringField()
            tag = StringField()
        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)
        Animal.drop_collection()
        Ocurrence.drop_collection()
        class SubAnimal(Animal):
            nick = StringField()
        animal = Animal(name="Leopard", tag="heavy").save()
        sub_animal = SubAnimal(nick="doggo", name="dog").save()
        # Every accepted reference form: document, pk, DBRef, LazyReference.
        for ref in (
            animal,
            animal.pk,
            DBRef(animal._get_collection_name(), animal.pk),
            LazyReference(Animal, animal.pk),
            sub_animal,
            sub_animal.pk,
            DBRef(sub_animal._get_collection_name(), sub_animal.pk),
            LazyReference(SubAnimal, sub_animal.pk),
        ):
            p = Ocurrence(person="test", animal=ref).save()
            p.reload()
            assert isinstance(p.animal, LazyReference)
            p.animal.fetch()
    def test_lazy_reference_bad_set(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()
        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)
        Animal.drop_collection()
        Ocurrence.drop_collection()
        class BadDoc(Document):
            pass
        animal = Animal(name="Leopard", tag="heavy").save()
        baddoc = BadDoc().save()
        for bad in (
            42,
            "foo",
            baddoc,
            DBRef(baddoc._get_collection_name(), animal.pk),
            LazyReference(BadDoc, animal.pk),
        ):
            with pytest.raises(ValidationError):
                Ocurrence(person="test", animal=bad).save()
    def test_lazy_reference_query_conversion(self):
        """Ensure that LazyReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """
        class Member(Document):
            user_num = IntField(primary_key=True)
        class BlogPost(Document):
            title = StringField()
            author = LazyReferenceField(Member, dbref=False)
        Member.drop_collection()
        BlogPost.drop_collection()
        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()
        post1 = BlogPost(title="post 1", author=m1)
        post1.save()
        post2 = BlogPost(title="post 2", author=m2)
        post2.save()
        post = BlogPost.objects(author=m1).first()
        assert post.id == post1.id
        post = BlogPost.objects(author=m2).first()
        assert post.id == post2.id
        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        assert post.id == post2.id
    def test_lazy_reference_query_conversion_dbref(self):
        """Ensure that LazyReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """
        class Member(Document):
            user_num = IntField(primary_key=True)
        class BlogPost(Document):
            title = StringField()
            author = LazyReferenceField(Member, dbref=True)
        Member.drop_collection()
        BlogPost.drop_collection()
        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()
        post1 = BlogPost(title="post 1", author=m1)
        post1.save()
        post2 = BlogPost(title="post 2", author=m2)
        post2.save()
        post = BlogPost.objects(author=m1).first()
        assert post.id == post1.id
        post = BlogPost.objects(author=m2).first()
        assert post.id == post2.id
        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        assert post.id == post2.id
    def test_lazy_reference_passthrough(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()
        class Ocurrence(Document):
            animal = LazyReferenceField(Animal, passthrough=False)
            animal_passthrough = LazyReferenceField(Animal, passthrough=True)
        Animal.drop_collection()
        Ocurrence.drop_collection()
        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(animal=animal, animal_passthrough=animal).save()
        p = Ocurrence.objects.get()
        assert isinstance(p.animal, LazyReference)
        with pytest.raises(KeyError):
            p.animal["name"]
        with pytest.raises(AttributeError):
            p.animal.name
        assert p.animal.pk == animal.pk
        assert p.animal_passthrough.name == "Leopard"
        assert p.animal_passthrough["name"] == "Leopard"
        # Should not be able to access referenced document's methods
        with pytest.raises(AttributeError):
            p.animal.save
        with pytest.raises(KeyError):
            p.animal["save"]
    def test_lazy_reference_not_set(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()
        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)
        Animal.drop_collection()
        Ocurrence.drop_collection()
        Ocurrence(person="foo").save()
        p = Ocurrence.objects.get()
        assert p.animal is None
    def test_lazy_reference_equality(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()
        Animal.drop_collection()
        animal = Animal(name="Leopard", tag="heavy").save()
        animalref = LazyReference(Animal, animal.pk)
        assert animal == animalref
        assert animalref == animal
        other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90"))
        assert animal != other_animalref
        assert other_animalref != animal
    def test_lazy_reference_embedded(self):
        class Animal(Document):
            name = StringField()
            tag = StringField()
        class EmbeddedOcurrence(EmbeddedDocument):
            in_list = ListField(LazyReferenceField(Animal))
            direct = LazyReferenceField(Animal)
        class Ocurrence(Document):
            in_list = ListField(LazyReferenceField(Animal))
            in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
            direct = LazyReferenceField(Animal)
        Animal.drop_collection()
        Ocurrence.drop_collection()
        animal1 = Animal(name="doggo").save()
        animal2 = Animal(name="cheeta").save()
        def check_fields_type(occ):
            # Every reference slot (direct, list, embedded) must stay lazy.
            assert isinstance(occ.direct, LazyReference)
            for elem in occ.in_list:
                assert isinstance(elem, LazyReference)
            assert isinstance(occ.in_embedded.direct, LazyReference)
            for elem in occ.in_embedded.in_list:
                assert isinstance(elem, LazyReference)
        occ = Ocurrence(
            in_list=[animal1, animal2],
            in_embedded={"in_list": [animal1, animal2], "direct": animal1},
            direct=animal1,
        ).save()
        check_fields_type(occ)
        occ.reload()
        check_fields_type(occ)
        occ.direct = animal1.id
        occ.in_list = [animal1.id, animal2.id]
        occ.in_embedded.direct = animal1.id
        occ.in_embedded.in_list = [animal1.id, animal2.id]
        check_fields_type(occ)
class TestGenericLazyReferenceField(MongoDBTestCase):
def test_generic_lazy_reference_simple(self):
class Animal(Document):
name = StringField()
tag = StringField()
class Ocurrence(Document):
person = StringField()
animal = GenericLazyReferenceField()
Animal.drop_collection()
Ocurrence.drop_collection()
animal = Animal(name="Leopard", tag="heavy").save()
Ocurrence(person="test", animal=animal).save()
p = Ocurrence.objects.get()
assert isinstance(p.animal, LazyReference)
fetched_animal = p.animal.fetch()
assert fetched_animal == animal
# `fetch` keep cache on referenced document by default...
animal.tag = "not so heavy"
animal.save()
double_fetch = p.animal.fetch()
assert fetched_animal is double_fetch
assert double_fetch.tag == "heavy"
# ...unless specified otherwise
fetch_force = p.animal.fetch(force=True)
assert fetch_force is not fetched_animal
assert fetch_force.tag == "not so heavy"
def test_generic_lazy_reference_choices(self):
class Animal(Document):
name = StringField()
class Vegetal(Document):
name = StringField()
class Mineral(Document):
name = StringField()
class Ocurrence(Document):
living_thing = GenericLazyReferenceField(choices=[Animal, Vegetal])
thing = GenericLazyReferenceField()
Animal.drop_collection()
Vegetal.drop_collection()
Mineral.drop_collection()
Ocurrence.drop_collection()
animal = Animal(name="Leopard").save()
vegetal = Vegetal(name="Oak").save()
mineral = Mineral(name="Granite").save()
occ_animal = Ocurrence(living_thing=animal, thing=animal).save()
_ = Ocurrence(living_thing=vegetal, thing=vegetal).save()
with pytest.raises(ValidationError):
Ocurrence(living_thing=mineral).save()
occ = Ocurrence.objects.get(living_thing=animal)
assert occ == occ_animal
assert isinstance(occ.thing, LazyReference)
assert isinstance(occ.living_thing, LazyReference)
occ.thing = vegetal
occ.living_thing = vegetal
occ.save()
occ.thing = mineral
occ.living_thing = mineral
with pytest.raises(ValidationError):
occ.save()
def test_generic_lazy_reference_set(self):
class Animal(Document):
meta = {"allow_inheritance": True}
name = StringField()
tag = StringField()
class Ocurrence(Document):
person = StringField()
animal = GenericLazyReferenceField()
Animal.drop_collection()
Ocurrence.drop_collection()
class SubAnimal(Animal):
nick = StringField()
animal = Animal(name="Leopard", tag="heavy").save()
sub_animal = SubAnimal(nick="doggo", name="dog").save()
for ref in (
animal,
LazyReference(Animal, animal.pk),
{"_cls": "Animal", "_ref": DBRef(animal._get_collection_name(), animal.pk)},
sub_animal,
LazyReference(SubAnimal, sub_animal.pk),
{
"_cls": "SubAnimal",
"_ref": DBRef(sub_animal._get_collection_name(), sub_animal.pk),
},
):
p = Ocurrence(person="test", animal=ref).save()
p.reload()
assert isinstance(p.animal, (LazyReference, Document))
p.animal.fetch()
def test_generic_lazy_reference_bad_set(self):
class Animal(Document):
name = StringField()
tag = StringField()
class Ocurrence(Document):
person = StringField()
animal = GenericLazyReferenceField(choices=["Animal"])
Animal.drop_collection()
Ocurrence.drop_collection()
class BadDoc(Document):
pass
animal = Animal(name="Leopard", tag="heavy").save()
baddoc = BadDoc().save()
for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)):
with pytest.raises(ValidationError):
Ocurrence(person="test", animal=bad).save()
def test_generic_lazy_reference_query_conversion(self):
class Member(Document):
user_num = IntField(primary_key=True)
class BlogPost(Document):
title = StringField()
author = GenericLazyReferenceField()
Member.drop_collection()
BlogPost.drop_collection()
m1 = Member(user_num=1)
m1.save()
m2 = Member(user_num=2)
m2.save()
post1 = BlogPost(title="post 1", author=m1)
post1.save()
post2 = BlogPost(title="post 2", author=m2)
post2.save()
post = BlogPost.objects(author=m1).first()
assert post.id == post1.id
post = BlogPost.objects(author=m2).first()
assert post.id == post2.id
# Same thing by passing a LazyReference instance
post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
assert post.id == post2.id
def test_generic_lazy_reference_not_set(self):
class Animal(Document):
name = StringField()
tag = StringField()
class Ocurrence(Document):
person = StringField()
animal = GenericLazyReferenceField()
Animal.drop_collection()
Ocurrence.drop_collection()
Ocurrence(person="foo").save()
p = Ocurrence.objects.get()
assert p.animal is None
def test_generic_lazy_reference_accepts_string_instead_of_class(self):
class Animal(Document):
name = StringField()
tag = StringField()
class Ocurrence(Document):
person = StringField()
animal = GenericLazyReferenceField("Animal")
Animal.drop_collection()
Ocurrence.drop_collection()
animal = Animal().save()
Ocurrence(animal=animal).save()
p = Ocurrence.objects.get()
assert p.animal == animal
def test_generic_lazy_reference_embedded(self):
class Animal(Document):
name = StringField()
tag = StringField()
class EmbeddedOcurrence(EmbeddedDocument):
in_list = ListField(GenericLazyReferenceField())
direct = GenericLazyReferenceField()
class Ocurrence(Document):
in_list = ListField(GenericLazyReferenceField())
in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
direct = GenericLazyReferenceField()
Animal.drop_collection()
Ocurrence.drop_collection()
animal1 = Animal(name="doggo").save()
animal2 = Animal(name="cheeta").save()
def check_fields_type(occ):
assert isinstance(occ.direct, LazyReference)
for elem in occ.in_list:
assert isinstance(elem, LazyReference)
assert isinstance(occ.in_embedded.direct, LazyReference)
for elem in occ.in_embedded.in_list:
assert isinstance(elem, LazyReference)
occ = Ocurrence(
in_list=[animal1, animal2],
in_embedded={"in_list": [animal1, animal2], "direct": animal1},
direct=animal1,
).save()
check_fields_type(occ)
occ.reload()
check_fields_type(occ)
animal1_ref = {
"_cls": "Animal",
"_ref": DBRef(animal1._get_collection_name(), animal1.pk),
}
animal2_ref = {
"_cls": "Animal",
"_ref": DBRef(animal2._get_collection_name(), animal2.pk),
}
occ.direct = animal1_ref
occ.in_list = [animal1_ref, animal2_ref]
occ.in_embedded.direct = animal1_ref
occ.in_embedded.in_list = [animal1_ref, animal2_ref]
check_fields_type(occ)
| 32.229167
| 88
| 0.605365
| 1,858
| 18,564
| 5.89774
| 0.093649
| 0.052382
| 0.037781
| 0.035682
| 0.802884
| 0.755521
| 0.702865
| 0.690546
| 0.679139
| 0.66253
| 0
| 0.008777
| 0.294225
| 18,564
| 575
| 89
| 32.285217
| 0.827584
| 0.039054
| 0
| 0.75286
| 0
| 0
| 0.027608
| 0.001349
| 0
| 0
| 0
| 0
| 0.107551
| 1
| 0.052632
| false
| 0.022883
| 0.011442
| 0
| 0.176201
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
07e991fb186447e1d70582fdad760abc2387e3c6
| 32
|
py
|
Python
|
demo/build/broken.py
|
robopsi/calm
|
1acf0847015e0e5d6fe2f14f30970fa69267c3a9
|
[
"BSD-3-Clause"
] | 18
|
2017-07-03T21:18:58.000Z
|
2017-10-31T11:27:28.000Z
|
demo/build/broken.py
|
mitsuhiko/calm
|
1acf0847015e0e5d6fe2f14f30970fa69267c3a9
|
[
"BSD-3-Clause"
] | 1
|
2017-07-05T20:24:37.000Z
|
2017-07-05T20:24:37.000Z
|
demo/build/broken.py
|
mitsuhiko/calm
|
1acf0847015e0e5d6fe2f14f30970fa69267c3a9
|
[
"BSD-3-Clause"
] | 3
|
2017-07-12T18:26:26.000Z
|
2021-03-03T11:38:49.000Z
|
class badlynamedclass:
pass
| 10.666667
| 22
| 0.75
| 3
| 32
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.21875
| 32
| 2
| 23
| 16
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
6afaea622c3622fa84c75e4324856bf6b6ef1013
| 58
|
py
|
Python
|
te.py
|
wu-xiu-shu/test
|
535f3425c8f997a54d637793949366e9a1f07141
|
[
"Apache-2.0"
] | null | null | null |
te.py
|
wu-xiu-shu/test
|
535f3425c8f997a54d637793949366e9a1f07141
|
[
"Apache-2.0"
] | null | null | null |
te.py
|
wu-xiu-shu/test
|
535f3425c8f997a54d637793949366e9a1f07141
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
#coding: utf-8
import re
import urllib
| 9.666667
| 17
| 0.724138
| 10
| 58
| 4.2
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02
| 0.137931
| 58
| 5
| 18
| 11.6
| 0.82
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ed02d06936dff8133836d40c280f2d78278d44a1
| 216
|
py
|
Python
|
demo/apps/soakinspecks/admin.py
|
mysidewalk/pykc-meetup1
|
258dfb8debc46174c089cfb0583cb88b03fbbe07
|
[
"MIT"
] | null | null | null |
demo/apps/soakinspecks/admin.py
|
mysidewalk/pykc-meetup1
|
258dfb8debc46174c089cfb0583cb88b03fbbe07
|
[
"MIT"
] | null | null | null |
demo/apps/soakinspecks/admin.py
|
mysidewalk/pykc-meetup1
|
258dfb8debc46174c089cfb0583cb88b03fbbe07
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from soakinspecks.models import *
admin.site.register(Flavor)
admin.site.register(MixturePart)
admin.site.register(Mixture)
admin.site.register(Inventory)
admin.site.register(Order)
| 24
| 33
| 0.828704
| 29
| 216
| 6.172414
| 0.482759
| 0.251397
| 0.47486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064815
| 216
| 8
| 34
| 27
| 0.886139
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.285714
| 0
| 0.285714
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ed2cb7dc9bfb2043db7d2ec8d448e09704bf7602
| 703
|
py
|
Python
|
hydra/core/environment/basic/resources.py
|
rpacholek/hydra
|
60e3c2eec5ab1fd1dde8e510baa5175173c66a6a
|
[
"MIT"
] | null | null | null |
hydra/core/environment/basic/resources.py
|
rpacholek/hydra
|
60e3c2eec5ab1fd1dde8e510baa5175173c66a6a
|
[
"MIT"
] | null | null | null |
hydra/core/environment/basic/resources.py
|
rpacholek/hydra
|
60e3c2eec5ab1fd1dde8e510baa5175173c66a6a
|
[
"MIT"
] | null | null | null |
import platform
from ..common.EnvVisibility import *
class CPUResource:
def get_name():
return "CPU"
@resource
def get_architecture():
return "architecture", platform.machine(), Public
@resource
def get_processor_type():
return "type", platform.processor(), Protected
@resource
def get_cores():
return "core_number", multiprocessing.cpu_count(), Public
class MemoryResource:
def get_name():
return "memory"
@resource
def get_memory_size():
# TODO
return "total_memory", 0, Public
"""
@dynamic_resouce
def get_available_memory():
return "available_memory", 0, Protected
"""
| 19
| 65
| 0.631579
| 72
| 703
| 5.958333
| 0.458333
| 0.097902
| 0.130536
| 0.074592
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003876
| 0.266003
| 703
| 36
| 66
| 19.527778
| 0.827519
| 0.00569
| 0
| 0.3
| 0
| 0
| 0.082051
| 0
| 0
| 0
| 0
| 0.027778
| 0
| 1
| 0.3
| true
| 0
| 0.1
| 0.3
| 0.8
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
ed425abce1e8e7745656b24f5646ed9ec3f76afa
| 37
|
py
|
Python
|
card_site/__init__.py
|
WiiLink24/Digicard
|
a6bbefb43aec2c31474380fe8f13cc7fc5939e7d
|
[
"MIT"
] | null | null | null |
card_site/__init__.py
|
WiiLink24/Digicard
|
a6bbefb43aec2c31474380fe8f13cc7fc5939e7d
|
[
"MIT"
] | null | null | null |
card_site/__init__.py
|
WiiLink24/Digicard
|
a6bbefb43aec2c31474380fe8f13cc7fc5939e7d
|
[
"MIT"
] | 1
|
2021-09-26T12:18:39.000Z
|
2021-09-26T12:18:39.000Z
|
from . import assets, routes, upload
| 18.5
| 36
| 0.756757
| 5
| 37
| 5.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162162
| 37
| 1
| 37
| 37
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ed4ad957a3a116652bee38b4bbc32d91cc0bab4e
| 1,053
|
py
|
Python
|
flockos/apis/users.py
|
bilmyers/pyflock
|
b440ffbcd6a18c0d81b81dcdcbae7ae16c025d39
|
[
"Apache-2.0"
] | 14
|
2017-02-14T07:02:59.000Z
|
2022-03-30T13:59:59.000Z
|
flockos/apis/users.py
|
bilmyers/pyflock
|
b440ffbcd6a18c0d81b81dcdcbae7ae16c025d39
|
[
"Apache-2.0"
] | 10
|
2016-10-22T20:52:00.000Z
|
2021-05-10T10:40:30.000Z
|
flockos/apis/users.py
|
bilmyers/pyflock
|
b440ffbcd6a18c0d81b81dcdcbae7ae16c025d39
|
[
"Apache-2.0"
] | 8
|
2017-03-03T13:16:34.000Z
|
2020-07-23T17:59:54.000Z
|
# coding: utf-8
# python 2 and python 3 compatibility library
from six import iteritems
from ..api_client import call_api
def get_info(token, **kwargs):
"""
This method makes a synchronous HTTP request.
:param str token: (required)
:return: response dict
"""
params = locals()
for key, val in iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
resource_path = '/users.getInfo'.replace('{format}', 'json')
response = call_api(resource_path, params=params)
return response
def get_public_profile(token, user_id, **kwargs):
"""
This method makes a synchronous HTTP request.
:param str token: (required)
:param str user_id: (required)
:return: response dict
"""
params = locals()
for key, val in iteritems(params['kwargs']):
params[key] = val
del params['kwargs']
resource_path = '/users.getPublicProfile'.replace('{format}', 'json')
response = call_api(resource_path, params=params)
return response
| 23.4
| 73
| 0.645774
| 128
| 1,053
| 5.210938
| 0.414063
| 0.083958
| 0.047976
| 0.062969
| 0.734633
| 0.734633
| 0.734633
| 0.734633
| 0.734633
| 0.734633
| 0
| 0.003727
| 0.235518
| 1,053
| 44
| 74
| 23.931818
| 0.824845
| 0.273504
| 0
| 0.666667
| 0
| 0
| 0.122832
| 0.033237
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9c0aadaede18f31fdf23d874beaa62ac6e976b5a
| 289
|
py
|
Python
|
gemini/__init__.py
|
randyesq/gemini-python
|
6ced9d9620f39ce64e032f3033b9e89d472d95a2
|
[
"MIT"
] | null | null | null |
gemini/__init__.py
|
randyesq/gemini-python
|
6ced9d9620f39ce64e032f3033b9e89d472d95a2
|
[
"MIT"
] | null | null | null |
gemini/__init__.py
|
randyesq/gemini-python
|
6ced9d9620f39ce64e032f3033b9e89d472d95a2
|
[
"MIT"
] | null | null | null |
from .public_client import PublicClient
from .private_client import PrivateClient
try:
from .basewebsocket import BaseWebSocket
from .marketdataws import MarketDataWS
from .ordereventsws import OrderEventsWS
except ImportError:
pass
from .order_book import GeminiOrderBook
| 28.9
| 44
| 0.820069
| 31
| 289
| 7.548387
| 0.548387
| 0.102564
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152249
| 289
| 9
| 45
| 32.111111
| 0.955102
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.111111
| 0.777778
| 0
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
9c2c0d50bedd444ba002c6c29900bc8d7e282337
| 14,648
|
py
|
Python
|
applications/FluidDynamicsApplication/tests/two_fluid_hydrostatic_pool_test.py
|
AndreaVoltan/MyKratos7.0
|
e977752722e8ef1b606f25618c4bf8fd04c434cc
|
[
"BSD-4-Clause"
] | 2
|
2020-04-30T19:13:08.000Z
|
2021-04-14T19:40:47.000Z
|
applications/FluidDynamicsApplication/tests/two_fluid_hydrostatic_pool_test.py
|
AndreaVoltan/MyKratos7.0
|
e977752722e8ef1b606f25618c4bf8fd04c434cc
|
[
"BSD-4-Clause"
] | 1
|
2020-04-30T19:19:09.000Z
|
2020-05-02T14:22:36.000Z
|
applications/FluidDynamicsApplication/tests/two_fluid_hydrostatic_pool_test.py
|
AndreaVoltan/MyKratos7.0
|
e977752722e8ef1b606f25618c4bf8fd04c434cc
|
[
"BSD-4-Clause"
] | 1
|
2020-06-12T08:51:24.000Z
|
2020-06-12T08:51:24.000Z
|
from __future__ import print_function, absolute_import, division #makes KratosMultiphysics backward compatible with python 2.6 and 2.7
import KratosMultiphysics
import KratosMultiphysics.FluidDynamicsApplication
from fluid_dynamics_analysis import FluidDynamicsAnalysis
import KratosMultiphysics.kratos_utilities as kratos_utils
try:
import KratosMultiphysics.ExternalSolversApplication
have_external_solvers = True
except ImportError as e:
have_external_solvers = False
import time
import os
import sys
import KratosMultiphysics.KratosUnittest as UnitTest
# Class to navigate through the folders
class WorkFolderScope:
def __init__(self, work_folder):
self.currentPath = os.getcwd()
self.scope = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)),work_folder))
def __enter__(self):
os.chdir(self.scope)
# Class derived from the UnitTest (KratosMultiphysics.KratosUnittest) class
class TwoFluidHydrostaticPoolTest(UnitTest.TestCase):
def __init__(self):
self.waterLevel = 0.5
self.work_folder = "TwoFluidStaticPoolTest"
self.settings = "TwoFluidStaticPoolTest2D.json"
self.check_tolerance = 1e-10
self.check_toleranceDistance = 0.05
self.gravitationalAcceleration = 9.81
self.domainHeight = 1.0
self.rho1 = 1000.0
self.rho2 = 1.0
# switch here for output
self.print_output = False
# runs the two dimensinal test case
def runTwoFluidHydrostaticTest2D(self):
with open("TwoFluidStaticPoolTest/TwoFluidStaticPoolTest2D.json",'r') as parameter_file:
parameters = KratosMultiphysics.Parameters(parameter_file.read())
model = KratosMultiphysics.Model()
if self.print_output:
parameters["output_processes"].AddValue("gid_output", KratosMultiphysics.Parameters(R'''[{
"python_module" : "gid_output_process",
"kratos_module" : "KratosMultiphysics",
"process_name" : "GiDOutputProcess",
"help" : "This process writes postprocessing files for GiD",
"Parameters" : {
"model_part_name" : "FluidModelPart",
"output_name" : "FluidModelPart",
"postprocess_parameters" : {
"result_file_configuration" : {
"gidpost_flags" : {
"GiDPostMode" : "GiD_PostBinary",
"WriteDeformedMeshFlag" : "WriteDeformed",
"WriteConditionsFlag" : "WriteConditions",
"MultiFileFlag" : "SingleFile"
},
"file_label" : "time",
"output_control_type" : "time",
"output_frequency" : 0.1,
"body_output" : true,
"node_output" : false,
"skin_output" : false,
"plane_output" : [],
"nodal_results" : ["VELOCITY","PRESSURE","DISTANCE","DENSITY","DYNAMIC_VISCOSITY"],
"gauss_point_results" : []
},
"point_data_configuration" : []
}
}
}]''') )
# running
self.simulation = FluidDynamicsAnalysisWithFlush2D(model,parameters)
self.simulation.Run()
# testing
for node in self.simulation._GetSolver().GetComputingModelPart().Nodes:
velocity = node.GetSolutionStepValue(KratosMultiphysics.VELOCITY)
self.assertAlmostEqual(0.0, velocity[0], delta = self.check_tolerance)
self.assertAlmostEqual(0.0, velocity[1], delta = self.check_tolerance)
self.assertAlmostEqual(0.0, velocity[2], delta = self.check_tolerance)
pressure = node.GetSolutionStepValue(KratosMultiphysics.PRESSURE)
if node.Y > self.waterLevel:
pressureAnalytic = (self.domainHeight-node.Y)*self.gravitationalAcceleration*self.rho2
else:
pressureAnalytic = (self.domainHeight-self.waterLevel)*self.gravitationalAcceleration*self.rho2
pressureAnalytic += (self.waterLevel-node.Y)*self.gravitationalAcceleration*self.rho1
self.assertAlmostEqual(pressureAnalytic, pressure, delta = self.check_tolerance)
distance = node.GetSolutionStepValue(KratosMultiphysics.DISTANCE)
distanceAnalytic = (node.Y - self.waterLevel)
self.assertAlmostEqual(distanceAnalytic, distance, delta = self.check_toleranceDistance)
kratos_utils.DeleteFileIfExisting('TwoFluidStaticPoolTest2D.post.bin')
kratos_utils.DeleteFileIfExisting('tests.post.lst')
# runs the three dimensional test case
def runTwoFluidHydrostaticTest3D(self):
with open("TwoFluidStaticPoolTest/TwoFluidStaticPoolTest3D.json",'r') as parameter_file:
parameters = KratosMultiphysics.Parameters(parameter_file.read())
model = KratosMultiphysics.Model()
if self.print_output:
parameters["output_processes"].AddValue("gid_output", KratosMultiphysics.Parameters(R'''[{
"python_module" : "gid_output_process",
"kratos_module" : "KratosMultiphysics",
"process_name" : "GiDOutputProcess",
"help" : "This process writes postprocessing files for GiD",
"Parameters" : {
"model_part_name" : "FluidModelPart",
"output_name" : "FluidModelPart",
"postprocess_parameters" : {
"result_file_configuration" : {
"gidpost_flags" : {
"GiDPostMode" : "GiD_PostBinary",
"WriteDeformedMeshFlag" : "WriteDeformed",
"WriteConditionsFlag" : "WriteConditions",
"MultiFileFlag" : "SingleFile"
},
"file_label" : "time",
"output_control_type" : "time",
"output_frequency" : 0.1,
"body_output" : true,
"node_output" : false,
"skin_output" : false,
"plane_output" : [],
"nodal_results" : ["VELOCITY","PRESSURE","DISTANCE","DENSITY","DYNAMIC_VISCOSITY"],
"gauss_point_results" : []
},
"point_data_configuration" : []
}
}
}]''') )
# running
self.simulation = FluidDynamicsAnalysisWithFlush3D(model,parameters)
self.simulation.Run()
# testing
for node in self.simulation._GetSolver().GetComputingModelPart().Nodes:
velocity = node.GetSolutionStepValue(KratosMultiphysics.VELOCITY)
self.assertAlmostEqual(0.0, velocity[0], delta = self.check_tolerance)
self.assertAlmostEqual(0.0, velocity[1], delta = self.check_tolerance)
self.assertAlmostEqual(0.0, velocity[2], delta = self.check_tolerance)
pressure = node.GetSolutionStepValue(KratosMultiphysics.PRESSURE)
if node.Z > self.waterLevel:
pressureAnalytic = (self.domainHeight-node.Z)*self.gravitationalAcceleration*self.rho2
else:
pressureAnalytic = (self.domainHeight-self.waterLevel)*self.gravitationalAcceleration*self.rho2
pressureAnalytic += (self.waterLevel-node.Z)*self.gravitationalAcceleration*self.rho1
self.assertAlmostEqual(pressureAnalytic, pressure, delta = self.check_tolerance)
distance = node.GetSolutionStepValue(KratosMultiphysics.DISTANCE)
distanceAnalytic = (node.Z - self.waterLevel)
self.assertAlmostEqual(distanceAnalytic, distance, delta = self.check_toleranceDistance)
kratos_utils.DeleteFileIfExisting('TwoFluidStaticPoolTest3D.post.bin')
kratos_utils.DeleteFileIfExisting('tests.post.lst')
class FluidDynamicsAnalysisWithFlush2D(FluidDynamicsAnalysis):
def __init__(self,model,project_parameters,flush_frequency=10.0):
super(FluidDynamicsAnalysisWithFlush2D,self).__init__(model,project_parameters)
self.flush_frequency = flush_frequency
self.last_flush = time.time()
def ModifyInitialGeometry(self):
init_h = 0.5
for node in self._GetSolver().GetComputingModelPart().Nodes:
distance = node.Y - init_h
node.SetSolutionStepValue(KratosMultiphysics.DISTANCE, distance)
def ApplyBoundaryConditions(self):
v_zero = KratosMultiphysics.Vector(3,0.0)
for node in self._GetSolver().GetComputingModelPart().Nodes:
if abs(node.X) > 0.499 and abs(node.X) < 0.501:
node.Fix(KratosMultiphysics.VELOCITY_X)
node.Fix(KratosMultiphysics.VELOCITY_Y)
node.SetSolutionStepValue(KratosMultiphysics.VELOCITY_X, 0.0)
# node.SetSolutionStepValue(KratosMultiphysics.VELOCITY, v_zero)
if abs(node.Y) > -0.001 and abs(node.Y) < 0.001:
node.Fix(KratosMultiphysics.VELOCITY_X)
node.Fix(KratosMultiphysics.VELOCITY_Y)
node.SetSolutionStepValue(KratosMultiphysics.VELOCITY_Y, 0.0)
# node.SetSolutionStepValue(KratosMultiphysics.VELOCITY, v_zero)
if abs(node.Y) > 0.999 and abs(node.Y) < 1.001:
node.Fix(KratosMultiphysics.VELOCITY_X)
node.Fix(KratosMultiphysics.VELOCITY_Y)
node.SetSolutionStepValue(KratosMultiphysics.VELOCITY_Y, 0.0)
# node.SetSolutionStepValue(KratosMultiphysics.VELOCITY, v_zero)
nFix = 1
v_zero = KratosMultiphysics.Vector(3,0.0)
self._GetSolver().GetComputingModelPart().GetNode(nFix).Fix(KratosMultiphysics.VELOCITY_X)
self._GetSolver().GetComputingModelPart().GetNode(nFix).Fix(KratosMultiphysics.VELOCITY_Y)
self._GetSolver().GetComputingModelPart().GetNode(nFix).SetSolutionStepValue(KratosMultiphysics.VELOCITY, v_zero)
self._GetSolver().GetComputingModelPart().GetNode(nFix).Fix(KratosMultiphysics.PRESSURE)
self._GetSolver().GetComputingModelPart().GetNode(nFix).SetSolutionStepValue(KratosMultiphysics.PRESSURE, 0.0)
def FinalizeSolutionStep(self):
super(FluidDynamicsAnalysisWithFlush2D,self).FinalizeSolutionStep()
if self.parallel_type == "OpenMP":
now = time.time()
if now - self.last_flush > self.flush_frequency:
sys.stdout.flush()
self.last_flush = now
class FluidDynamicsAnalysisWithFlush3D(FluidDynamicsAnalysis):
def __init__(self,model,project_parameters,flush_frequency=10.0):
super(FluidDynamicsAnalysisWithFlush3D,self).__init__(model,project_parameters)
self.flush_frequency = flush_frequency
self.last_flush = time.time()
def ModifyInitialGeometry(self):
init_h = 0.5
for node in self._GetSolver().GetComputingModelPart().Nodes:
distance = node.Z - init_h
node.SetSolutionStepValue(KratosMultiphysics.DISTANCE, distance)
def ApplyBoundaryConditions(self):
v_zero = KratosMultiphysics.Vector(3,0.0)
for node in self._GetSolver().GetComputingModelPart().Nodes:
if abs(node.X) > 0.499 and abs(node.X) < 0.501:
node.Fix(KratosMultiphysics.VELOCITY_X)
node.Fix(KratosMultiphysics.VELOCITY_Y)
node.Fix(KratosMultiphysics.VELOCITY_Z)
node.SetSolutionStepValue(KratosMultiphysics.VELOCITY_X, 0.0)
# node.SetSolutionStepValue(KratosMultiphysics.VELOCITY, v_zero)
if abs(node.Y) > 0.499 and abs(node.Y) < 0.501:
node.Fix(KratosMultiphysics.VELOCITY_X)
node.Fix(KratosMultiphysics.VELOCITY_Y)
node.Fix(KratosMultiphysics.VELOCITY_Z)
node.SetSolutionStepValue(KratosMultiphysics.VELOCITY_Y, 0.0)
# node.SetSolutionStepValue(KratosMultiphysics.VELOCITY, v_zero)
if abs(node.Z) < 0.001 or abs(node.Z) > 0.999:
node.Fix(KratosMultiphysics.VELOCITY_X)
node.Fix(KratosMultiphysics.VELOCITY_Y)
node.Fix(KratosMultiphysics.VELOCITY_Z)
node.SetSolutionStepValue(KratosMultiphysics.VELOCITY_Z, 0.0)
# node.SetSolutionStepValue(KratosMultiphysics.VELOCITY, v_zero)
nFix = 3
v_zero = KratosMultiphysics.Vector(3,0.0)
self._GetSolver().GetComputingModelPart().GetNode(nFix).Fix(KratosMultiphysics.VELOCITY_X)
self._GetSolver().GetComputingModelPart().GetNode(nFix).Fix(KratosMultiphysics.VELOCITY_Y)
self._GetSolver().GetComputingModelPart().GetNode(nFix).Fix(KratosMultiphysics.VELOCITY_Z)
self._GetSolver().GetComputingModelPart().GetNode(nFix).SetSolutionStepValue(KratosMultiphysics.VELOCITY, v_zero)
self._GetSolver().GetComputingModelPart().GetNode(nFix).Fix(KratosMultiphysics.PRESSURE)
self._GetSolver().GetComputingModelPart().GetNode(nFix).SetSolutionStepValue(KratosMultiphysics.PRESSURE, 0.0)
def FinalizeSolutionStep(self):
super(FluidDynamicsAnalysisWithFlush3D,self).FinalizeSolutionStep()
if self.parallel_type == "OpenMP":
now = time.time()
if now - self.last_flush > self.flush_frequency:
sys.stdout.flush()
self.last_flush = now
if __name__ == "__main__":
test = TwoFluidHydrostaticPoolTest()
test.runTwoFluidHydrostaticTest2D()
test.runTwoFluidHydrostaticTest3D()
| 51.038328
| 134
| 0.604997
| 1,217
| 14,648
| 7.103533
| 0.169269
| 0.108271
| 0.067091
| 0.057259
| 0.796992
| 0.793175
| 0.781608
| 0.781608
| 0.766686
| 0.759167
| 0
| 0.015619
| 0.305025
| 14,648
| 287
| 135
| 51.038328
| 0.833595
| 0.046696
| 0
| 0.663717
| 0
| 0
| 0.273496
| 0.054556
| 0
| 0
| 0
| 0
| 0.044248
| 1
| 0.057522
| false
| 0
| 0.048673
| 0
| 0.123894
| 0.017699
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9c3883db5717bd218694a8769f28ef94204d6562
| 69
|
py
|
Python
|
CodingBat/Warmup-1/front3.py
|
arthxvr/coding--python
|
1e91707be6cb8fef816dad0c1a65f2cc3327357e
|
[
"MIT"
] | null | null | null |
CodingBat/Warmup-1/front3.py
|
arthxvr/coding--python
|
1e91707be6cb8fef816dad0c1a65f2cc3327357e
|
[
"MIT"
] | null | null | null |
CodingBat/Warmup-1/front3.py
|
arthxvr/coding--python
|
1e91707be6cb8fef816dad0c1a65f2cc3327357e
|
[
"MIT"
] | null | null | null |
def front3(str):
return str * 3 if len(str) < 3 else str[:3] * 3
| 23
| 51
| 0.57971
| 14
| 69
| 2.857143
| 0.571429
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098039
| 0.26087
| 69
| 2
| 52
| 34.5
| 0.686275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
9c3abfbef092561fa6b9458c128c90e380208e32
| 1,322
|
py
|
Python
|
lib/systems/d-allose.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/systems/d-allose.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/systems/d-allose.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
import pulsar as psr
def load_ref_system():
""" Returns d-allose as found in the IQMol fragment library.
All credit to https://github.com/nutjunkie/IQmol
"""
return psr.make_system("""
C 3.0639 -0.8179 -0.1121
C 1.8753 0.1728 -0.1688
C 0.5179 -0.5752 -0.0148
C -0.6988 0.4034 -0.1303
C -2.0478 -0.3574 0.0713
C -3.2201 0.5999 -0.1503
O 1.8364 0.8416 -1.4188
O 0.5670 -1.1855 1.2575
O -0.7910 1.0037 -1.4022
O -2.0751 -1.0149 1.3188
O -3.8839 1.0352 0.7617
H 3.1237 -1.4350 -1.0267
H 1.9729 0.9425 0.6380
H 0.4347 -1.3685 -0.7987
H -0.6033 1.2071 0.6418
H -2.1280 -1.2083 -0.6451
H -3.4301 0.8847 -1.1927
H 2.7039 1.1917 -1.5744
H -0.3212 -1.4862 1.4532
H 0.0966 1.2374 -1.6685
H -2.3118 -0.3765 1.9835
O 4.2946 -0.1446 -0.0955
H 2.9725 -1.4987 0.7549
H 4.3882 0.2532 0.7612
""")
| 41.3125
| 64
| 0.396369
| 198
| 1,322
| 2.631313
| 0.535354
| 0.015355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.546282
| 0.501513
| 1,322
| 31
| 65
| 42.645161
| 0.24431
| 0.079425
| 0
| 0
| 0
| 0
| 0.929825
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| true
| 0
| 0.035714
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9c58e735698e62702c7331c99954fc7443ae2e24
| 41
|
py
|
Python
|
tests/__init__.py
|
arunnarayanan/boilerplate
|
e7163cfc027053da8c31c03c87a09a920e398163
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
arunnarayanan/boilerplate
|
e7163cfc027053da8c31c03c87a09a920e398163
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
arunnarayanan/boilerplate
|
e7163cfc027053da8c31c03c87a09a920e398163
|
[
"MIT"
] | null | null | null |
"""Unit test package for boilerplate."""
| 20.5
| 40
| 0.707317
| 5
| 41
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 41
| 1
| 41
| 41
| 0.805556
| 0.829268
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9c7325ee5f321e2c092a426d2f5a949f20e76925
| 471
|
py
|
Python
|
src/minerl/herobraine/hero/__init__.py
|
imatge-upc/pixelcoordEDL
|
353632feed6ac8c93758c1a2a1b7a477e7ff053c
|
[
"MIT"
] | 1
|
2021-06-10T04:03:24.000Z
|
2021-06-10T04:03:24.000Z
|
src/minerl/herobraine/hero/__init__.py
|
imatge-upc/pixelcoordEDL
|
353632feed6ac8c93758c1a2a1b7a477e7ff053c
|
[
"MIT"
] | null | null | null |
src/minerl/herobraine/hero/__init__.py
|
imatge-upc/pixelcoordEDL
|
353632feed6ac8c93758c1a2a1b7a477e7ff053c
|
[
"MIT"
] | null | null | null |
"""
minerl.herobraine.hero -- The interface between Hero (Malmo) and the minerl.herobraine package.
"""
import logging
logger = logging.getLogger(__name__)
import minerl.herobraine.hero.mc
import minerl.herobraine.hero.spaces
# from minerl.herobraine.hero.instance_manager import InstanceManager
from minerl.herobraine.hero.agent_handler import AgentHandler
# from minerl.herobraine.hero.env import HeroEnv
from minerl.herobraine.hero.mc import KEYMAP
| 29.4375
| 96
| 0.794055
| 59
| 471
| 6.237288
| 0.440678
| 0.347826
| 0.380435
| 0.26087
| 0.152174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125265
| 471
| 15
| 97
| 31.4
| 0.893204
| 0.447983
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.833333
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9c7ba336b09fd960d6d11983cb144597d12c7f32
| 323
|
py
|
Python
|
xoeuf/tests/test_fields_integration_with_reports/tests/__init__.py
|
merchise-autrement/xoeuf
|
583a0faa345480e73110d467203eefd142b0a710
|
[
"BSD-3-Clause"
] | 3
|
2015-05-16T04:40:14.000Z
|
2016-01-26T05:36:20.000Z
|
xoeuf/tests/test_fields_integration_with_reports/tests/__init__.py
|
merchise-autrement/xoeuf
|
583a0faa345480e73110d467203eefd142b0a710
|
[
"BSD-3-Clause"
] | null | null | null |
xoeuf/tests/test_fields_integration_with_reports/tests/__init__.py
|
merchise-autrement/xoeuf
|
583a0faa345480e73110d467203eefd142b0a710
|
[
"BSD-3-Clause"
] | 1
|
2017-03-23T23:08:50.000Z
|
2017-03-23T23:08:50.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# Copyright (c) Merchise Autrement [~º/~] and Contributors
# All rights reserved.
#
# This is free software; you can do what the LICENCE file allows you to.
#
from . import test_report_integration # noqa
| 32.3
| 72
| 0.547988
| 37
| 323
| 4.72973
| 0.972973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003597
| 0.139319
| 323
| 9
| 73
| 35.888889
| 0.625899
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1319a8c071a18b84f37c076059ba74f0eebdd953
| 102
|
py
|
Python
|
pontos-turisticos/enderecos/admin.py
|
LucasVarela42/PontosTuristicos
|
96d8a20739dbd9f56ae26bda069ae1862b89e35d
|
[
"BSD-3-Clause"
] | null | null | null |
pontos-turisticos/enderecos/admin.py
|
LucasVarela42/PontosTuristicos
|
96d8a20739dbd9f56ae26bda069ae1862b89e35d
|
[
"BSD-3-Clause"
] | null | null | null |
pontos-turisticos/enderecos/admin.py
|
LucasVarela42/PontosTuristicos
|
96d8a20739dbd9f56ae26bda069ae1862b89e35d
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib import admin
from enderecos.models import Endereco
# Register the Endereco model with the Django admin site using the
# default ModelAdmin (no custom list display or filters).
admin.site.register(Endereco)
| 20.4
| 37
| 0.843137
| 14
| 102
| 6.142857
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098039
| 102
| 5
| 38
| 20.4
| 0.934783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
134992ad72e88891be1a167e2d0ee539e91ff0eb
| 55
|
py
|
Python
|
tally_ho/settings/default.py
|
crononauta/tally-ho
|
ba2207bfaef27bee3ff13a393983ca493f767238
|
[
"Apache-2.0"
] | null | null | null |
tally_ho/settings/default.py
|
crononauta/tally-ho
|
ba2207bfaef27bee3ff13a393983ca493f767238
|
[
"Apache-2.0"
] | null | null | null |
tally_ho/settings/default.py
|
crononauta/tally-ho
|
ba2207bfaef27bee3ff13a393983ca493f767238
|
[
"Apache-2.0"
] | null | null | null |
from tally_ho.settings.common import * # flake8: noqa
| 27.5
| 54
| 0.763636
| 8
| 55
| 5.125
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021277
| 0.145455
| 55
| 1
| 55
| 55
| 0.851064
| 0.218182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
13a1a7f7c55e1582f3b11f251429f92e0bcedf8b
| 558
|
py
|
Python
|
Refactoring/Python/prescription.py
|
signed/KataMedicineClash
|
ffe9265f632c3a7a82d25bb015ebeb9abb57cc5a
|
[
"MIT"
] | 7
|
2015-01-02T11:01:08.000Z
|
2019-06-13T07:16:16.000Z
|
Refactoring/Python/prescription.py
|
signed/KataMedicineClash
|
ffe9265f632c3a7a82d25bb015ebeb9abb57cc5a
|
[
"MIT"
] | null | null | null |
Refactoring/Python/prescription.py
|
signed/KataMedicineClash
|
ffe9265f632c3a7a82d25bb015ebeb9abb57cc5a
|
[
"MIT"
] | 8
|
2015-05-17T20:30:11.000Z
|
2021-12-15T22:41:31.000Z
|
from datetime import timedelta, date
class Prescription(object):
    """A single dispensing of a medicine covering a fixed number of days.

    Attributes:
        dispense_date: date the medicine was handed out (defaults to today).
        days_supply: how many consecutive days the supply lasts.
    """

    def __init__(self, dispense_date=None, days_supply=30):
        # ``dispense_date or date.today()`` also replaces a passed-in falsy
        # value with today; preserved for backward compatibility.
        self.dispense_date = dispense_date or date.today()
        self.days_supply = days_supply

    def completion_date(self):
        """Return the first date on which the supply has run out."""
        return self.dispense_date + timedelta(days=self.days_supply)

    def days_taken(self):
        """Return the list of dates covered by this prescription."""
        return [self.dispense_date + timedelta(days=i)
                for i in range(self.days_supply)]

    def __cmp__(self, other):
        # Python 2 ordering hook. The builtin ``cmp`` no longer exists in
        # Python 3, so emulate it: -1, 0 or 1 by dispense_date.
        lhs, rhs = self.dispense_date, other.dispense_date
        return (lhs > rhs) - (lhs < rhs)

    def __lt__(self, other):
        # Rich comparison so sorting also works on Python 3, where
        # ``__cmp__`` is ignored. (Deliberately no __eq__ so default
        # hashing behaviour is unchanged.)
        return self.dispense_date < other.dispense_date
| 34.875
| 88
| 0.684588
| 73
| 558
| 4.931507
| 0.369863
| 0.233333
| 0.222222
| 0.122222
| 0.216667
| 0.216667
| 0.216667
| 0
| 0
| 0
| 0
| 0.00464
| 0.227599
| 558
| 16
| 89
| 34.875
| 0.830626
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.363636
| false
| 0
| 0.090909
| 0.272727
| 0.818182
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
13df136b853fb96c17e92504ccf459781694a138
| 587
|
py
|
Python
|
nestedtensor/__init__.py
|
swolchok/nestedtensor
|
3300e3bc42394ab4bb226cef8acc631012a72ef0
|
[
"BSD-3-Clause"
] | 229
|
2019-10-23T22:05:35.000Z
|
2022-03-23T00:42:43.000Z
|
nestedtensor/__init__.py
|
swolchok/nestedtensor
|
3300e3bc42394ab4bb226cef8acc631012a72ef0
|
[
"BSD-3-Clause"
] | 110
|
2019-11-07T19:58:00.000Z
|
2022-02-22T21:29:45.000Z
|
nestedtensor/__init__.py
|
swolchok/nestedtensor
|
3300e3bc42394ab4bb226cef8acc631012a72ef0
|
[
"BSD-3-Clause"
] | 25
|
2019-10-28T15:01:21.000Z
|
2022-02-22T21:23:41.000Z
|
import torch
from .nested.creation import as_nested_tensor
from .nested.creation import nested_tensor
from .nested.masking import nested_tensor_from_tensor_mask
from .nested.masking import nested_tensor_from_padded_tensor
from .nested.nested import NestedTensor
from .nested.nested import to_nested_tensor
from .nested.nested import transpose_nchw_nhwc
from .nested.nested import transpose_nhwc_nchw
from .nested.fuser import fuse_conv_bn
from .nested.fuser import fuse_conv_relu
from .nested.fuser import fuse_conv_add_relu
from . import nested
from . import _C
from . import nn
| 25.521739
| 60
| 0.846678
| 90
| 587
| 5.244444
| 0.255556
| 0.233051
| 0.169492
| 0.186441
| 0.552966
| 0.349576
| 0.165254
| 0
| 0
| 0
| 0
| 0
| 0.110733
| 587
| 22
| 61
| 26.681818
| 0.904215
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
13e00580b8e56180859b0ba61b03800d01444f19
| 30,577
|
py
|
Python
|
skyportal/tests/api/test_comments.py
|
dmitryduev/skyportal
|
6c2ca82da38954d671c3d8f1de0ee03f362f5190
|
[
"BSD-3-Clause"
] | 1
|
2021-01-20T05:58:16.000Z
|
2021-01-20T05:58:16.000Z
|
skyportal/tests/api/test_comments.py
|
dmitryduev/skyportal
|
6c2ca82da38954d671c3d8f1de0ee03f362f5190
|
[
"BSD-3-Clause"
] | 151
|
2020-10-15T23:49:47.000Z
|
2022-03-12T08:42:46.000Z
|
skyportal/tests/api/test_comments.py
|
dmitryduev/skyportal
|
6c2ca82da38954d671c3d8f1de0ee03f362f5190
|
[
"BSD-3-Clause"
] | null | null | null |
import uuid
from skyportal.tests import api
def test_add_and_retrieve_comment_group_id(comment_token, public_source, public_group):
    """Post a comment restricted to one group, then read it back."""
    post_status, post_data = api(
        'POST',
        f'sources/{public_source.id}/comments',
        data={
            'text': 'Comment text',
            'group_ids': [public_group.id],
        },
        token=comment_token,
    )
    assert post_status == 200
    comment_id = post_data['data']['comment_id']

    get_status, get_data = api(
        'GET', f'sources/{public_source.id}/comments/{comment_id}', token=comment_token
    )
    assert get_status == 200
    assert get_data['data']['text'] == 'Comment text'
    # NOTE(review): presumably the token belongs to a bot user — confirm fixture.
    assert get_data['data']['bot']
def test_add_and_retrieve_comment_no_group_id(comment_token, public_source):
    """A comment posted without explicit group_ids can still be read back."""
    post_status, post_data = api(
        'POST',
        f'sources/{public_source.id}/comments',
        data={'text': 'Comment text'},
        token=comment_token,
    )
    assert post_status == 200
    comment_id = post_data['data']['comment_id']

    get_status, get_data = api(
        'GET', f'sources/{public_source.id}/comments/{comment_id}', token=comment_token
    )
    assert get_status == 200
    assert get_data['data']['text'] == 'Comment text'
def test_add_and_retrieve_comment_group_access(
    comment_token_two_groups,
    public_source_two_groups,
    public_group2,
    public_group,
    comment_token,
):
    """Comment visibility follows the comment's group list."""
    # Post a comment visible only to public_group2.
    code, body = api(
        'POST',
        f'sources/{public_source_two_groups.id}/comments',
        data={
            'text': 'Comment text',
            'group_ids': [public_group2.id],
        },
        token=comment_token_two_groups,
    )
    assert code == 200
    comment_id = body['data']['comment_id']

    # This token belongs to public_group2, so the read succeeds.
    code, body = api(
        'GET',
        f'sources/{public_source_two_groups.id}/comments/{comment_id}',
        token=comment_token_two_groups,
    )
    assert code == 200
    assert body['data']['text'] == 'Comment text'

    # This token does not belong to public_group2, so access is denied.
    code, body = api(
        'GET',
        f'sources/{public_source_two_groups.id}/comments/{comment_id}',
        token=comment_token,
    )
    assert code == 403

    # A comment shared with both groups should be visible to both tokens.
    code, body = api(
        'POST',
        f'sources/{public_source_two_groups.id}/comments',
        data={
            'text': 'Comment text',
            'group_ids': [public_group.id, public_group2.id],
        },
        token=comment_token_two_groups,
    )
    assert code == 200
    comment_id = body['data']['comment_id']

    code, body = api(
        'GET',
        f'sources/{public_source_two_groups.id}/comments/{comment_id}',
        token=comment_token_two_groups,
    )
    assert code == 200
    assert body['data']['text'] == 'Comment text'

    code, body = api(
        'GET',
        f'sources/{public_source_two_groups.id}/comments/{comment_id}',
        token=comment_token,
    )
    assert code == 200
    assert body['data']['text'] == 'Comment text'
def test_update_comment_group_list(
    comment_token_two_groups,
    public_source_two_groups,
    public_group2,
    public_group,
    comment_token,
):
    """Updating a comment's group list widens which tokens can see it."""
    code, body = api(
        'POST',
        f'sources/{public_source_two_groups.id}/comments',
        data={
            'text': 'Comment text',
            'group_ids': [public_group2.id],
        },
        token=comment_token_two_groups,
    )
    assert code == 200
    comment_id = body['data']['comment_id']

    # This token belongs to public_group2, so the read succeeds.
    code, body = api(
        'GET',
        f'sources/{public_source_two_groups.id}/comments/{comment_id}',
        token=comment_token_two_groups,
    )
    assert code == 200
    assert body['data']['text'] == 'Comment text'

    # This token does not belong to public_group2, so access is denied.
    code, body = api(
        'GET',
        f'sources/{public_source_two_groups.id}/comments/{comment_id}',
        token=comment_token,
    )
    assert code == 403

    # Widen the group list; afterwards both tokens should see the comment.
    code, body = api(
        'PUT',
        f'sources/{public_source_two_groups.id}/comments/{comment_id}',
        data={
            'text': 'Comment text new',
            'group_ids': [public_group.id, public_group2.id],
        },
        token=comment_token_two_groups,
    )
    assert code == 200

    code, body = api(
        'GET',
        f'sources/{public_source_two_groups.id}/comments/{comment_id}',
        token=comment_token_two_groups,
    )
    assert code == 200
    assert body['data']['text'] == 'Comment text new'

    code, body = api(
        'GET',
        f'sources/{public_source_two_groups.id}/comments/{comment_id}',
        token=comment_token,
    )
    assert code == 200
    assert body['data']['text'] == 'Comment text new'
def test_cannot_add_comment_without_permission(view_only_token, public_source):
    """A read-only token must not be able to create comments."""
    code, body = api(
        'POST',
        f'sources/{public_source.id}/comments',
        data={'text': 'Comment text'},
        token=view_only_token,
    )
    assert code == 400
    assert body['status'] == 'error'
def test_delete_comment(comment_token, public_source):
    """A comment can be deleted; subsequent reads are then forbidden."""
    code, body = api(
        'POST',
        f'sources/{public_source.id}/comments',
        data={'text': 'Comment text'},
        token=comment_token,
    )
    assert code == 200
    comment_id = body['data']['comment_id']

    code, body = api(
        'GET', f'sources/{public_source.id}/comments/{comment_id}', token=comment_token
    )
    assert code == 200
    assert body['data']['text'] == 'Comment text'

    # Deleting through a mismatched object ID must be rejected.
    code, body = api(
        'DELETE',
        f'sources/{public_source.id}zzz/comments/{comment_id}',
        token=comment_token,
    )
    assert code == 400
    assert (
        "Comment resource ID does not match resource ID given in path"
        in body["message"]
    )

    code, body = api(
        'DELETE',
        f'sources/{public_source.id}/comments/{comment_id}',
        token=comment_token,
    )
    assert code == 200

    code, body = api(
        'GET', f'sources/{public_source.id}/comments/{comment_id}', token=comment_token
    )
    assert code == 403
def test_problematic_put_comment_attachment_1275(
    super_admin_token, public_source, public_group
):
    """PUT with attachment_bytes but no attachment_name fails; with both it succeeds.

    The "1275" in the name presumably references an upstream issue — TODO confirm.
    The base64 blob is a large JSON fit-result attachment used as a regression fixture.
    """
    status, data = api(
        'POST',
        f'sources/{public_source.id}/comments',
        data={
            'text': 'asdf',
            'group_ids': [public_group.id],
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
    # need to specify both comment name and bytes
    status2, data2 = api(
        'PUT',
        f'sources/{public_source.id}/comments/{data["data"]["comment_id"]}',
        data={
            "attachment_bytes": "eyJ0aW1lc3RhbXAiOiAiMjAyMC0xMS0wNFQxMjowMDowMyIsICJydW4iOiAxODM5LCAiZHVyYXRpb24iOiAwLjE0NiwgInJlc3VsdCI6IHsibW9kZWwiOiAic2FsdDIiLCAiZml0X2xjX3BhcmFtZXRlcnMiOiB7ImJvdW5kcyI6IHsiYyI6IFstMiwgNV0sICJ4MSI6IFstNSwgNV0sICJ6IjogWzAsIDAuMl19fSwgImZpdF9hY2NlcHRhYmxlIjogZmFsc2UsICJwbG90X2luZm8iOiAic2FsdDIgY2hpc3EgMjAuMjkgbmRvZiAxIG9rIGZpdCBGYWxzZSIsICJtb2RlbF9hbmFseXNpcyI6IHsiaGFzX3ByZW1heF9kYXRhIjogdHJ1ZSwgImhhc19wb3N0bWF4X2RhdGEiOiBmYWxzZSwgIngxX2luX3JhbmdlIjogdHJ1ZSwgIl94MV9yYW5nZSI6IFstNCwgNF0sICJjX29rIjogdHJ1ZSwgIl9jX3JhbmdlIjogWy0xLCAyXX0sICJmaXRfcmVzdWx0cyI6IHsieiI6IDAuMTE3NDM5NTYwNTE3MDEwNjUsICJ0MCI6IDI0NTkxNTguODE5NzYyNTE2MywgIngwIjogMC4wMDA2MDg1NTg3NzI2MjI5NDY3LCAieDEiOiAtMC44NzM1ODM5NTY4MTk5NjczLCAiYyI6IC0wLjA1OTg1NTgxMTY2MDA2MzE1LCAibXdlYnYiOiAwLjA5OTU2MTk1NjAzOTAzMTEyLCAibXdyX3YiOiAzLjEsICJ6LmVyciI6IDAuMDIxNTUyNTQwMzEyMzk1NDI0LCAidDAuZXJyIjogMC45NTczNDkzNTY0OTY3MDY2LCAieDAuZXJyIjogNi43NDYwMTY5NDY3ODk5NDllLTA1LCAieDEuZXJyIjogMC42NDc4NTA5NzU5ODY5OTY2LCAiYy5lcnIiOiAwLjEzNDQxMzAzNjM5NjQxMzU1fSwgInNuY29zbW9faW5mbyI6IHsic3VjY2VzcyI6IHRydWUsICJjaGlzcSI6IDIwLjI5NDAxNzkxMDExMjMxMywgIm5kb2YiOiAxLCAiY2hpc3Fkb2YiOiAyMC4yOTQwMTc5MTAxMTIzMTN9LCAiZmx1eF9kaWN0IjogeyJ6dGZnIjogWzAuMCwgLTAuMDQyNjA1ODU1NzUwNDczMzYsIC0wLjE1OTc2MzYxMjQ2NDMxOTQyLCAtMC41MDU5ODM5ODUzNTUyNDg1LCAtMC4xMDU5NzkwNDU0NzgyOTA4MywgMy44MjIzMDg5NDc0ODQ1NzEsIDEyLjE5Njc1NDg2NzEwMDE0NywgMjMuODgzNTQ0OTEzNTM0MTUsIDM4LjIyMzIzMjgyMDM3NzUyLCA1NS4wMTU0NjcyNzgyNzMwOCwgNzQuMzQyNTQxNDE2MzQ5MjIsIDk2LjMyNzQ1MzkzMzE2MDA5LCAxMjEuMjMwODU1MjE3MTg2ODMsIDE0OS42NzE5NzMzNDU4Mzg4NiwgMTc4LjU4NzEzMzY3MjU1MDI3LCAyMDUuMDcyMDU1NjU4Nzg4NDYsIDIyOC4wNTUzMTYwNjg0MjQsIDI0Ny4zNTIzMjM5ODA4Nzg1NywgMjYyLjYwOTI1MTk2MzQ1NzMsIDI3My45NjAxOTExOTE4NTEsIDI4Mi4wMTYzOTE3NTE3NzMsIDI4Ni4zNzI2Mjg3NjU4NjE2MywgMjg2LjY4ODcwMTAxODEzNDQ1LCAyODMuNTU0NTY3MTE3MzQ4MSwgMjc3LjY5MjY0NTUyNDYwODgsIDI2OS40ODM1NDU5ODQ2MTMzLCAyNTkuMTAxNDEyMTkxMjIwOCwgMjQ2Ljk2OTEyOTQ0OTc2NTE1LCAyMzMuODk5MTY4NzM2OTE5MjgsIDIyMC4xMzIyNzI0NjI2MDgzNCwgMjA1LjcxNjY4NzM5MzQxMzE0LCAxOTAuNTk3MTUyMDAzOTQzODUsIDE3NS41NTM4NDE0NDAyMjYyMiwgMTYxLjE2NTk5Mzk2NDQ1NDMyLCAxNDcuNTEwNTE3ODIyNjI0MTcsIDEzNC40NDYyMjA0NjIxODY5NCwgMTIxLjg5NDI1MDE3MDAxNDM5LCAxMDkuOTA0OTI2NzQzMDg2NzIsIDk4Ljg1ODU4MjA1NTk5MDE0LCA4OC44MjIyNDkyNDQ5ODgwNiwgNzkuODU1NDY1Nzk5NzkzOTMsIDcyLjA5NTcyMzMzMTQ2NzIsIDY1LjMwMjgyMzM5NDQzNzU3LCA1OS4yNTMxMjgyMDM2MzYzNDQsIDUzLjkyNjI2NjAzMjI5NDUyLCA0OS40MTk3OTAzMTc3NTMyOCwgNDUuNjc4NTQ4MjU5MDMwNjQsIDQyLjMxMjkyMDczNTM0MDk3LCAzOS4xMzUwNDI0OTI2NjEzMSwgMzYuMTc1NTQ2NTcyMTI5NywgMzMuNDI4OTg3OTU4MTgzNDUsIDMwLjkyODcxOTcyNjIzMzE4NywgMjguNzUyMjgxODYxNTg1NjQsIDI2LjkwMjQ3ODU5MDAzMDA5NiwgMjUuMzMzOTc4MzQ4MzEyMzYsIDI0LjAxMTA1NjIzOTM4MjkzNSwgMjIuOTAzODgyMjIyNjQwNTU0LCAyMS45ODU1MjExMDM5NjA1NTgsIDIxLjE4MzYxNTc5ODI1OTE3MiwgMjAuNDUyMjYzMDY5MTMwNDY3LCAxOS43ODQ2NDgxNDA4MjM3NTUsIDE5LjE3NTU1OTc1NTQwMzQyNywgMTguNjIwMzc0ODc2NTc1MjksIDE4LjExNTA4NTAxNDE3MjMxNSwgMTcuNjU2MjcwMzk5NzY4MzIsIDE3LjI0MDMyMDg1NjEyNTk5LCAxNi44NTkxMTMwMzUwMTQ1NzcsIDE2LjQ4NjE4ODc2Njc0MTY5OCwgMTYuMTE4OTQwODAyOTc3MDQ3LCAxNS43NTg0NzM3NDM2NDc4NjIsIDE1LjQwNTc0NTcwNDAwMTI0NiwgMTUuMDYxNzIzMzM0MjQ2MDQzLCAxNC43MjczODEwMjI5Nzk4MDQsIDE0LjQwMzY5NTMxNTY1OTUzLCAxNC4wOTE2Mzk5MTExNzczNTMsIDEzLjc5MjE4MTE0NDkwNTA1OCwgMTMuNTA2MjczODY5OTM1ODA3LCAxMy4yMzQ4Njg0ODM3Njc1MzksIDEyLjk4MTU3MzMxODc4OTYyN10sICJ6dGZyIjogWzAuMCwgMC4yMDkwMjkyNzU5Mzc3MzQ5LCAwLjc4ODc0NjQ5ODgxMDcwMzksIDEuNzIyNTcyMDQ5MDAyNDYxNCwgMy42OTI5MjkzMjcwMDQxNDMyLCA4Ljc5MDcxNTY1MDg1NDgwNSwgMTcuNjMyNDU0MDIxMTYyMzM1LCAyOS4yNTAwMTgzNTEwOTI2MTcsIDQzLjA2OTYyOTQwMjAzMDcxLCA1OC44MjcyMTA3OTA5ODU1NywgNzYuNDg2NjMwNDI0MzgxMzksIDk2LjE5OTQ4Mzk4ODEwOTcyLCAxMTguMjA0MzUzMTc5NjA4NSwgMTQzLjAwNzYzODUwNjUxMDY2LCAxNjguMzEyOTYwNzk1NjcyMDQsIDE5MS45NDMxNTA1MTU0NTUwNywgMjEzLjExMjQ5MjAxODQ1MDMsIDIzMS43MDU4NjcwOTQ1MjU0MiwgMjQ3LjQ3Nzg0MDIwMzIxNTYsIDI2MC41NjYzOTUwNzY4NzIzNywgMjcxLjQ3ODE4NDY2MTkwMjcsIDI3OS44ODk5NTU4MjU4NTQ1NCwgMjg1LjUzNjM3MjA2NjA5NTA1LCAyODguNTYwNDMyMzQ4MTA3ODQsIDI4OS4yMDkxODAzODM0NjU1LCAyODcuNjUwMjExNzU1NDQxNywgMjg0LjAxMjM5MTM1MDM1NTcsIDI3OC40MDEyNzQyOTkwNjQ1LCAyNzAuNzQ3ODQ1MjM1ODM2MTcsIDI2MS4yMDkzMzY3NjQxOTcyLCAyNDkuOTY1NDI1MjA5MDkzNjQsIDIzNi45OTYwODA4NDE3NDY1NiwgMjIzLjgyMjE4ODMwMjU3MjMsIDIxMS42MDIyMzAxOTM5NTMzMiwgMjAwLjU3Njg4NDg1MTY3NjUsIDE5MC41OTQ5NzcwNDMxMDUzMywgMTgxLjU4NjIyMjIwNDI1NTUzLCAxNzMuNDE4MDQxNzE4Nzc0NzMsIDE2NS43MzA3MDQ1NzY4OTE4NCwgMTU4LjQ4MjI5NTMyNDg2NTAyLCAxNTEuNjk5NTg5MDg1MzM3ODYsIDE0NS40NTE4MTQ5NDI5MDg2NywgMTM5LjU4NDA3NTMzMzM1MzIsIDEzMy45NTkxODY4MzI4Njg3NywgMTI4LjU3MzUxMzA2ODM0MDUsIDEyMy41MTE2ODQ3MzA1NTI0LCAxMTguNzIxNzkwNTIxMTI0MTMsIDExMy44MDQ0MDc3MTY2NDgyOSwgMTA4LjYwODM1NTg2NDcxNzEzLCAxMDMuMjQxMzg2NjgzMDIxNDUsIDk3Ljc2NTI0NjgyMjU0MDA4LCA5Mi4zMzM3MTQxNjc3MjA4MSwgODcuMTk3NDkyMjAyMTk5MDQsIDgyLjQyODMxMDQ2NDc4ODMyLCA3Ny45Nzc5MjUyNzYzNzA4OCwgNzMuODEwMDcxNTY1NjgzNjMsIDY5Ljg5MTk0NDMyNjg0OSwgNjYuMTk5MzM4MTkxNjMwNCwgNjIuODE0MDY4Mjk4NzQ1Njk2LCA1OS43NzUwOTExMzk3MjA2MywgNTcuMDUxMDcyNjc2ODk3MTUsIDU0LjYxNDMxOTc2NDU2NTM0NCwgNTIuNDQxMzM3NjgzNjU1NjYsIDUwLjUxMTUyNzYyMjUyMDkyLCA0OC44MDY4ODUwNDEwMjEyMiwgNDcuMzA5MjE5ODg4NjkxNjE1LCA0NS45ODU1MTU2NDIxNDYxNywgNDQuNzQxNDAwNjkzNDQ3MDQsIDQzLjU2MjcwOTkyODYwOTE2NiwgNDIuNDQ3OTkwOTkzNDI2OTksIDQxLjM5NTQ1MDExNjg4MzUsIDQwLjQwMzQzMDc2MDk2MTY4LCAzOS40NzA0MDYxMjgyMjcyNCwgMzguNTk0OTYwNzIwNjY3NCwgMzcuNzc1Nzc3NzI1ODY5OTQsIDM3LjAxMTYzMDk2NDg1NDUyNSwgMzYuMzAxMzgwMzgzNDY1NjU2LCAzNS42NDM5NTk0MzY0NjcxNSwgMzUuMDQzODYwODA3MzQ5ODk0XSwgIm9ic2pkIjogWzI0NTkxMzYuNDcwOTcxMzA2LCAyNDU5MTM3LjQ3MDk3MTMwNiwgMjQ1OTEzOC40NzA5NzEzMDYsIDI0NTkxMzkuNDcwOTcxMzA2LCAyNDU5MTQwLjQ3MDk3MTMwNiwgMjQ1OTE0MS40NzA5NzEzMDYsIDI0NTkxNDIuNDcwOTcxMzA2LCAyNDU5MTQzLjQ3MDk3MTMwNiwgMjQ1OTE0NC40NzA5NzEzMDYsIDI0NTkxNDUuNDcwOTcxMzA2LCAyNDU5MTQ2LjQ3MDk3MTMwNiwgMjQ1OTE0Ny40NzA5NzEzMDYsIDI0NTkxNDguNDcwOTcxMzA2LCAyNDU5MTQ5LjQ3MDk3MTMwNiwgMjQ1OTE1MC40NzA5NzEzMDYsIDI0NTkxNTEuNDcwOTcxMzA2LCAyNDU5MTUyLjQ3MDk3MTMwNiwgMjQ1OTE1My40NzA5NzEzMDYsIDI0NTkxNTQuNDcwOTcxMzA2LCAyNDU5MTU1LjQ3MDk3MTMwNiwgMjQ1OTE1Ni40NzA5NzEzMDYsIDI0NTkxNTcuNDcwOTcxMzA2LCAyNDU5MTU4LjQ3MDk3MTMwNiwgMjQ1OTE1OS40NzA5NzEzMDYsIDI0NTkxNjAuNDcwOTcxMzA2LCAyNDU5MTYxLjQ3MDk3MTMwNiwgMjQ1OTE2Mi40NzA5NzEzMDYsIDI0NTkxNjMuNDcwOTcxMzA2LCAyNDU5MTY0LjQ3MDk3MTMwNiwgMjQ1OTE2NS40NzA5NzEzMDYsIDI0NTkxNjYuNDcwOTcxMzA2LCAyNDU5MTY3LjQ3MDk3MTMwNiwgMjQ1OTE2OC40NzA5NzEzMDYsIDI0NTkxNjkuNDcwOTcxMzA2LCAyNDU5MTcwLjQ3MDk3MTMwNiwgMjQ1OTE3MS40NzA5NzEzMDYsIDI0NTkxNzIuNDcwOTcxMzA2LCAyNDU5MTczLjQ3MDk3MTMwNiwgMjQ1OTE3NC40NzA5NzEzMDYsIDI0NTkxNzUuNDcwOTcxMzA2LCAyNDU5MTc2LjQ3MDk3MTMwNiwgMjQ1OTE3Ny40NzA5NzEzMDYsIDI0NTkxNzguNDcwOTcxMzA2LCAyNDU5MTc5LjQ3MDk3MTMwNiwgMjQ1OTE4MC40NzA5NzEzMDYsIDI0NTkxODEuNDcwOTcxMzA2LCAyNDU5MTgyLjQ3MDk3MTMwNiwgMjQ1OTE4My40NzA5NzEzMDYsIDI0NTkxODQuNDcwOTcxMzA2LCAyNDU5MTg1LjQ3MDk3MTMwNiwgMjQ1OTE4Ni40NzA5NzEzMDYsIDI0NTkxODcuNDcwOTcxMzA2LCAyNDU5MTg4LjQ3MDk3MTMwNiwgMjQ1OTE4OS40NzA5NzEzMDYsIDI0NTkxOTAuNDcwOTcxMzA2LCAyNDU5MTkxLjQ3MDk3MTMwNiwgMjQ1OTE5Mi40NzA5NzEzMDYsIDI0NTkxOTMuNDcwOTcxMzA2LCAyNDU5MTk0LjQ3MDk3MTMwNiwgMjQ1OTE5NS40NzA5NzEzMDYsIDI0NTkxOTYuNDcwOTcxMzA2LCAyNDU5MTk3LjQ3MDk3MTMwNiwgMjQ1OTE5OC40NzA5NzEzMDYsIDI0NTkxOTkuNDcwOTcxMzA2LCAyNDU5MjAwLjQ3MDk3MTMwNiwgMjQ1OTIwMS40NzA5NzEzMDYsIDI0NTkyMDIuNDcwOTcxMzA2LCAyNDU5MjAzLjQ3MDk3MTMwNiwgMjQ1OTIwNC40NzA5NzEzMDYsIDI0NTkyMDUuNDcwOTcxMzA2LCAyNDU5MjA2LjQ3MDk3MTMwNiwgMjQ1OTIwNy40NzA5NzEzMDYsIDI0NTkyMDguNDcwOTcxMzA2LCAyNDU5MjA5LjQ3MDk3MTMwNiwgMjQ1OTIxMC40NzA5NzEzMDYsIDI0NTkyMTEuNDcwOTcxMzA2LCAyNDU5MjEyLjQ3MDk3MTMwNiwgMjQ1OTIxMy40NzA5NzEzMDYsIDI0NTkyMTQuNDcwOTcxMzA2XX19fQ=="
        },
        token=super_admin_token,
    )
    assert status2 == 400
    assert data2['status'] == 'error'
    # Supplying both name and bytes makes the update acceptable.
    payload = {
        "attachment_bytes": "eyJ0aW1lc3RhbXAiOiAiMjAyMC0xMS0wNFQxMjowMDowMyIsICJydW4iOiAxODM5LCAiZHVyYXRpb24iOiAwLjE0NiwgInJlc3VsdCI6IHsibW9kZWwiOiAic2FsdDIiLCAiZml0X2xjX3BhcmFtZXRlcnMiOiB7ImJvdW5kcyI6IHsiYyI6IFstMiwgNV0sICJ4MSI6IFstNSwgNV0sICJ6IjogWzAsIDAuMl19fSwgImZpdF9hY2NlcHRhYmxlIjogZmFsc2UsICJwbG90X2luZm8iOiAic2FsdDIgY2hpc3EgMjAuMjkgbmRvZiAxIG9rIGZpdCBGYWxzZSIsICJtb2RlbF9hbmFseXNpcyI6IHsiaGFzX3ByZW1heF9kYXRhIjogdHJ1ZSwgImhhc19wb3N0bWF4X2RhdGEiOiBmYWxzZSwgIngxX2luX3JhbmdlIjogdHJ1ZSwgIl94MV9yYW5nZSI6IFstNCwgNF0sICJjX29rIjogdHJ1ZSwgIl9jX3JhbmdlIjogWy0xLCAyXX0sICJmaXRfcmVzdWx0cyI6IHsieiI6IDAuMTE3NDM5NTYwNTE3MDEwNjUsICJ0MCI6IDI0NTkxNTguODE5NzYyNTE2MywgIngwIjogMC4wMDA2MDg1NTg3NzI2MjI5NDY3LCAieDEiOiAtMC44NzM1ODM5NTY4MTk5NjczLCAiYyI6IC0wLjA1OTg1NTgxMTY2MDA2MzE1LCAibXdlYnYiOiAwLjA5OTU2MTk1NjAzOTAzMTEyLCAibXdyX3YiOiAzLjEsICJ6LmVyciI6IDAuMDIxNTUyNTQwMzEyMzk1NDI0LCAidDAuZXJyIjogMC45NTczNDkzNTY0OTY3MDY2LCAieDAuZXJyIjogNi43NDYwMTY5NDY3ODk5NDllLTA1LCAieDEuZXJyIjogMC42NDc4NTA5NzU5ODY5OTY2LCAiYy5lcnIiOiAwLjEzNDQxMzAzNjM5NjQxMzU1fSwgInNuY29zbW9faW5mbyI6IHsic3VjY2VzcyI6IHRydWUsICJjaGlzcSI6IDIwLjI5NDAxNzkxMDExMjMxMywgIm5kb2YiOiAxLCAiY2hpc3Fkb2YiOiAyMC4yOTQwMTc5MTAxMTIzMTN9LCAiZmx1eF9kaWN0IjogeyJ6dGZnIjogWzAuMCwgLTAuMDQyNjA1ODU1NzUwNDczMzYsIC0wLjE1OTc2MzYxMjQ2NDMxOTQyLCAtMC41MDU5ODM5ODUzNTUyNDg1LCAtMC4xMDU5NzkwNDU0NzgyOTA4MywgMy44MjIzMDg5NDc0ODQ1NzEsIDEyLjE5Njc1NDg2NzEwMDE0NywgMjMuODgzNTQ0OTEzNTM0MTUsIDM4LjIyMzIzMjgyMDM3NzUyLCA1NS4wMTU0NjcyNzgyNzMwOCwgNzQuMzQyNTQxNDE2MzQ5MjIsIDk2LjMyNzQ1MzkzMzE2MDA5LCAxMjEuMjMwODU1MjE3MTg2ODMsIDE0OS42NzE5NzMzNDU4Mzg4NiwgMTc4LjU4NzEzMzY3MjU1MDI3LCAyMDUuMDcyMDU1NjU4Nzg4NDYsIDIyOC4wNTUzMTYwNjg0MjQsIDI0Ny4zNTIzMjM5ODA4Nzg1NywgMjYyLjYwOTI1MTk2MzQ1NzMsIDI3My45NjAxOTExOTE4NTEsIDI4Mi4wMTYzOTE3NTE3NzMsIDI4Ni4zNzI2Mjg3NjU4NjE2MywgMjg2LjY4ODcwMTAxODEzNDQ1LCAyODMuNTU0NTY3MTE3MzQ4MSwgMjc3LjY5MjY0NTUyNDYwODgsIDI2OS40ODM1NDU5ODQ2MTMzLCAyNTkuMTAxNDEyMTkxMjIwOCwgMjQ2Ljk2OTEyOTQ0OTc2NTE1LCAyMzMuODk5MTY4NzM2OTE5MjgsIDIyMC4xMzIyNzI0NjI2MDgzNCwgMjA1LjcxNjY4NzM5MzQxMzE0LCAxOTAuNTk3MTUyMDAzOTQzODUsIDE3NS41NTM4NDE0NDAyMjYyMiwgMTYxLjE2NTk5Mzk2NDQ1NDMyLCAxNDcuNTEwNTE3ODIyNjI0MTcsIDEzNC40NDYyMjA0NjIxODY5NCwgMTIxLjg5NDI1MDE3MDAxNDM5LCAxMDkuOTA0OTI2NzQzMDg2NzIsIDk4Ljg1ODU4MjA1NTk5MDE0LCA4OC44MjIyNDkyNDQ5ODgwNiwgNzkuODU1NDY1Nzk5NzkzOTMsIDcyLjA5NTcyMzMzMTQ2NzIsIDY1LjMwMjgyMzM5NDQzNzU3LCA1OS4yNTMxMjgyMDM2MzYzNDQsIDUzLjkyNjI2NjAzMjI5NDUyLCA0OS40MTk3OTAzMTc3NTMyOCwgNDUuNjc4NTQ4MjU5MDMwNjQsIDQyLjMxMjkyMDczNTM0MDk3LCAzOS4xMzUwNDI0OTI2NjEzMSwgMzYuMTc1NTQ2NTcyMTI5NywgMzMuNDI4OTg3OTU4MTgzNDUsIDMwLjkyODcxOTcyNjIzMzE4NywgMjguNzUyMjgxODYxNTg1NjQsIDI2LjkwMjQ3ODU5MDAzMDA5NiwgMjUuMzMzOTc4MzQ4MzEyMzYsIDI0LjAxMTA1NjIzOTM4MjkzNSwgMjIuOTAzODgyMjIyNjQwNTU0LCAyMS45ODU1MjExMDM5NjA1NTgsIDIxLjE4MzYxNTc5ODI1OTE3MiwgMjAuNDUyMjYzMDY5MTMwNDY3LCAxOS43ODQ2NDgxNDA4MjM3NTUsIDE5LjE3NTU1OTc1NTQwMzQyNywgMTguNjIwMzc0ODc2NTc1MjksIDE4LjExNTA4NTAxNDE3MjMxNSwgMTcuNjU2MjcwMzk5NzY4MzIsIDE3LjI0MDMyMDg1NjEyNTk5LCAxNi44NTkxMTMwMzUwMTQ1NzcsIDE2LjQ4NjE4ODc2Njc0MTY5OCwgMTYuMTE4OTQwODAyOTc3MDQ3LCAxNS43NTg0NzM3NDM2NDc4NjIsIDE1LjQwNTc0NTcwNDAwMTI0NiwgMTUuMDYxNzIzMzM0MjQ2MDQzLCAxNC43MjczODEwMjI5Nzk4MDQsIDE0LjQwMzY5NTMxNTY1OTUzLCAxNC4wOTE2Mzk5MTExNzczNTMsIDEzLjc5MjE4MTE0NDkwNTA1OCwgMTMuNTA2MjczODY5OTM1ODA3LCAxMy4yMzQ4Njg0ODM3Njc1MzksIDEyLjk4MTU3MzMxODc4OTYyN10sICJ6dGZyIjogWzAuMCwgMC4yMDkwMjkyNzU5Mzc3MzQ5LCAwLjc4ODc0NjQ5ODgxMDcwMzksIDEuNzIyNTcyMDQ5MDAyNDYxNCwgMy42OTI5MjkzMjcwMDQxNDMyLCA4Ljc5MDcxNTY1MDg1NDgwNSwgMTcuNjMyNDU0MDIxMTYyMzM1LCAyOS4yNTAwMTgzNTEwOTI2MTcsIDQzLjA2OTYyOTQwMjAzMDcxLCA1OC44MjcyMTA3OTA5ODU1NywgNzYuNDg2NjMwNDI0MzgxMzksIDk2LjE5OTQ4Mzk4ODEwOTcyLCAxMTguMjA0MzUzMTc5NjA4NSwgMTQzLjAwNzYzODUwNjUxMDY2LCAxNjguMzEyOTYwNzk1NjcyMDQsIDE5MS45NDMxNTA1MTU0NTUwNywgMjEzLjExMjQ5MjAxODQ1MDMsIDIzMS43MDU4NjcwOTQ1MjU0MiwgMjQ3LjQ3Nzg0MDIwMzIxNTYsIDI2MC41NjYzOTUwNzY4NzIzNywgMjcxLjQ3ODE4NDY2MTkwMjcsIDI3OS44ODk5NTU4MjU4NTQ1NCwgMjg1LjUzNjM3MjA2NjA5NTA1LCAyODguNTYwNDMyMzQ4MTA3ODQsIDI4OS4yMDkxODAzODM0NjU1LCAyODcuNjUwMjExNzU1NDQxNywgMjg0LjAxMjM5MTM1MDM1NTcsIDI3OC40MDEyNzQyOTkwNjQ1LCAyNzAuNzQ3ODQ1MjM1ODM2MTcsIDI2MS4yMDkzMzY3NjQxOTcyLCAyNDkuOTY1NDI1MjA5MDkzNjQsIDIzNi45OTYwODA4NDE3NDY1NiwgMjIzLjgyMjE4ODMwMjU3MjMsIDIxMS42MDIyMzAxOTM5NTMzMiwgMjAwLjU3Njg4NDg1MTY3NjUsIDE5MC41OTQ5NzcwNDMxMDUzMywgMTgxLjU4NjIyMjIwNDI1NTUzLCAxNzMuNDE4MDQxNzE4Nzc0NzMsIDE2NS43MzA3MDQ1NzY4OTE4NCwgMTU4LjQ4MjI5NTMyNDg2NTAyLCAxNTEuNjk5NTg5MDg1MzM3ODYsIDE0NS40NTE4MTQ5NDI5MDg2NywgMTM5LjU4NDA3NTMzMzM1MzIsIDEzMy45NTkxODY4MzI4Njg3NywgMTI4LjU3MzUxMzA2ODM0MDUsIDEyMy41MTE2ODQ3MzA1NTI0LCAxMTguNzIxNzkwNTIxMTI0MTMsIDExMy44MDQ0MDc3MTY2NDgyOSwgMTA4LjYwODM1NTg2NDcxNzEzLCAxMDMuMjQxMzg2NjgzMDIxNDUsIDk3Ljc2NTI0NjgyMjU0MDA4LCA5Mi4zMzM3MTQxNjc3MjA4MSwgODcuMTk3NDkyMjAyMTk5MDQsIDgyLjQyODMxMDQ2NDc4ODMyLCA3Ny45Nzc5MjUyNzYzNzA4OCwgNzMuODEwMDcxNTY1NjgzNjMsIDY5Ljg5MTk0NDMyNjg0OSwgNjYuMTk5MzM4MTkxNjMwNCwgNjIuODE0MDY4Mjk4NzQ1Njk2LCA1OS43NzUwOTExMzk3MjA2MywgNTcuMDUxMDcyNjc2ODk3MTUsIDU0LjYxNDMxOTc2NDU2NTM0NCwgNTIuNDQxMzM3NjgzNjU1NjYsIDUwLjUxMTUyNzYyMjUyMDkyLCA0OC44MDY4ODUwNDEwMjEyMiwgNDcuMzA5MjE5ODg4NjkxNjE1LCA0NS45ODU1MTU2NDIxNDYxNywgNDQuNzQxNDAwNjkzNDQ3MDQsIDQzLjU2MjcwOTkyODYwOTE2NiwgNDIuNDQ3OTkwOTkzNDI2OTksIDQxLjM5NTQ1MDExNjg4MzUsIDQwLjQwMzQzMDc2MDk2MTY4LCAzOS40NzA0MDYxMjgyMjcyNCwgMzguNTk0OTYwNzIwNjY3NCwgMzcuNzc1Nzc3NzI1ODY5OTQsIDM3LjAxMTYzMDk2NDg1NDUyNSwgMzYuMzAxMzgwMzgzNDY1NjU2LCAzNS42NDM5NTk0MzY0NjcxNSwgMzUuMDQzODYwODA3MzQ5ODk0XSwgIm9ic2pkIjogWzI0NTkxMzYuNDcwOTcxMzA2LCAyNDU5MTM3LjQ3MDk3MTMwNiwgMjQ1OTEzOC40NzA5NzEzMDYsIDI0NTkxMzkuNDcwOTcxMzA2LCAyNDU5MTQwLjQ3MDk3MTMwNiwgMjQ1OTE0MS40NzA5NzEzMDYsIDI0NTkxNDIuNDcwOTcxMzA2LCAyNDU5MTQzLjQ3MDk3MTMwNiwgMjQ1OTE0NC40NzA5NzEzMDYsIDI0NTkxNDUuNDcwOTcxMzA2LCAyNDU5MTQ2LjQ3MDk3MTMwNiwgMjQ1OTE0Ny40NzA5NzEzMDYsIDI0NTkxNDguNDcwOTcxMzA2LCAyNDU5MTQ5LjQ3MDk3MTMwNiwgMjQ1OTE1MC40NzA5NzEzMDYsIDI0NTkxNTEuNDcwOTcxMzA2LCAyNDU5MTUyLjQ3MDk3MTMwNiwgMjQ1OTE1My40NzA5NzEzMDYsIDI0NTkxNTQuNDcwOTcxMzA2LCAyNDU5MTU1LjQ3MDk3MTMwNiwgMjQ1OTE1Ni40NzA5NzEzMDYsIDI0NTkxNTcuNDcwOTcxMzA2LCAyNDU5MTU4LjQ3MDk3MTMwNiwgMjQ1OTE1OS40NzA5NzEzMDYsIDI0NTkxNjAuNDcwOTcxMzA2LCAyNDU5MTYxLjQ3MDk3MTMwNiwgMjQ1OTE2Mi40NzA5NzEzMDYsIDI0NTkxNjMuNDcwOTcxMzA2LCAyNDU5MTY0LjQ3MDk3MTMwNiwgMjQ1OTE2NS40NzA5NzEzMDYsIDI0NTkxNjYuNDcwOTcxMzA2LCAyNDU5MTY3LjQ3MDk3MTMwNiwgMjQ1OTE2OC40NzA5NzEzMDYsIDI0NTkxNjkuNDcwOTcxMzA2LCAyNDU5MTcwLjQ3MDk3MTMwNiwgMjQ1OTE3MS40NzA5NzEzMDYsIDI0NTkxNzIuNDcwOTcxMzA2LCAyNDU5MTczLjQ3MDk3MTMwNiwgMjQ1OTE3NC40NzA5NzEzMDYsIDI0NTkxNzUuNDcwOTcxMzA2LCAyNDU5MTc2LjQ3MDk3MTMwNiwgMjQ1OTE3Ny40NzA5NzEzMDYsIDI0NTkxNzguNDcwOTcxMzA2LCAyNDU5MTc5LjQ3MDk3MTMwNiwgMjQ1OTE4MC40NzA5NzEzMDYsIDI0NTkxODEuNDcwOTcxMzA2LCAyNDU5MTgyLjQ3MDk3MTMwNiwgMjQ1OTE4My40NzA5NzEzMDYsIDI0NTkxODQuNDcwOTcxMzA2LCAyNDU5MTg1LjQ3MDk3MTMwNiwgMjQ1OTE4Ni40NzA5NzEzMDYsIDI0NTkxODcuNDcwOTcxMzA2LCAyNDU5MTg4LjQ3MDk3MTMwNiwgMjQ1OTE4OS40NzA5NzEzMDYsIDI0NTkxOTAuNDcwOTcxMzA2LCAyNDU5MTkxLjQ3MDk3MTMwNiwgMjQ1OTE5Mi40NzA5NzEzMDYsIDI0NTkxOTMuNDcwOTcxMzA2LCAyNDU5MTk0LjQ3MDk3MTMwNiwgMjQ1OTE5NS40NzA5NzEzMDYsIDI0NTkxOTYuNDcwOTcxMzA2LCAyNDU5MTk3LjQ3MDk3MTMwNiwgMjQ1OTE5OC40NzA5NzEzMDYsIDI0NTkxOTkuNDcwOTcxMzA2LCAyNDU5MjAwLjQ3MDk3MTMwNiwgMjQ1OTIwMS40NzA5NzEzMDYsIDI0NTkyMDIuNDcwOTcxMzA2LCAyNDU5MjAzLjQ3MDk3MTMwNiwgMjQ1OTIwNC40NzA5NzEzMDYsIDI0NTkyMDUuNDcwOTcxMzA2LCAyNDU5MjA2LjQ3MDk3MTMwNiwgMjQ1OTIwNy40NzA5NzEzMDYsIDI0NTkyMDguNDcwOTcxMzA2LCAyNDU5MjA5LjQ3MDk3MTMwNiwgMjQ1OTIxMC40NzA5NzEzMDYsIDI0NTkyMTEuNDcwOTcxMzA2LCAyNDU5MjEyLjQ3MDk3MTMwNiwgMjQ1OTIxMy40NzA5NzEzMDYsIDI0NTkyMTQuNDcwOTcxMzA2XX19fQ==",
        "attachment_name": "ampel_test.json",
    }
    status2, data2 = api(
        'PUT',
        f'sources/{public_source.id}/comments/{data["data"]["comment_id"]}',
        data=payload,
        token=super_admin_token,
    )
    assert status2 == 200
    assert data2['status'] == 'success'
    # The stored attachment must round-trip unchanged.
    status3, data3 = api(
        'GET',
        f'sources/{public_source.id}/comments/{data["data"]["comment_id"]}',
        token=super_admin_token,
    )
    assert status3 == 200
    assert data3["status"] == 'success'
    assert data3["data"]["attachment_bytes"] == payload['attachment_bytes']
    assert data3['data']['attachment_name'] == payload['attachment_name']
def test_problematic_post_comment_attachment_1275(
    super_admin_token, public_source, public_group
):
    """POSTing a comment together with an attachment dict succeeds.

    The "1275" in the name presumably references an upstream issue — TODO confirm.
    The attachment is supplied inline as {'body': <base64>, 'name': <filename>}.
    """
    status, data = api(
        'POST',
        f'sources/{public_source.id}/comments',
        data={
            'text': 'asdf',
            'group_ids': [public_group.id],
            "attachment": {
                'body': "eyJ0aW1lc3RhbXAiOiAiMjAyMC0xMS0wNFQxMjowMDowMyIsICJydW4iOiAxODM5LCAiZHVyYXRpb24iOiAwLjE0NiwgInJlc3VsdCI6IHsibW9kZWwiOiAic2FsdDIiLCAiZml0X2xjX3BhcmFtZXRlcnMiOiB7ImJvdW5kcyI6IHsiYyI6IFstMiwgNV0sICJ4MSI6IFstNSwgNV0sICJ6IjogWzAsIDAuMl19fSwgImZpdF9hY2NlcHRhYmxlIjogZmFsc2UsICJwbG90X2luZm8iOiAic2FsdDIgY2hpc3EgMjAuMjkgbmRvZiAxIG9rIGZpdCBGYWxzZSIsICJtb2RlbF9hbmFseXNpcyI6IHsiaGFzX3ByZW1heF9kYXRhIjogdHJ1ZSwgImhhc19wb3N0bWF4X2RhdGEiOiBmYWxzZSwgIngxX2luX3JhbmdlIjogdHJ1ZSwgIl94MV9yYW5nZSI6IFstNCwgNF0sICJjX29rIjogdHJ1ZSwgIl9jX3JhbmdlIjogWy0xLCAyXX0sICJmaXRfcmVzdWx0cyI6IHsieiI6IDAuMTE3NDM5NTYwNTE3MDEwNjUsICJ0MCI6IDI0NTkxNTguODE5NzYyNTE2MywgIngwIjogMC4wMDA2MDg1NTg3NzI2MjI5NDY3LCAieDEiOiAtMC44NzM1ODM5NTY4MTk5NjczLCAiYyI6IC0wLjA1OTg1NTgxMTY2MDA2MzE1LCAibXdlYnYiOiAwLjA5OTU2MTk1NjAzOTAzMTEyLCAibXdyX3YiOiAzLjEsICJ6LmVyciI6IDAuMDIxNTUyNTQwMzEyMzk1NDI0LCAidDAuZXJyIjogMC45NTczNDkzNTY0OTY3MDY2LCAieDAuZXJyIjogNi43NDYwMTY5NDY3ODk5NDllLTA1LCAieDEuZXJyIjogMC42NDc4NTA5NzU5ODY5OTY2LCAiYy5lcnIiOiAwLjEzNDQxMzAzNjM5NjQxMzU1fSwgInNuY29zbW9faW5mbyI6IHsic3VjY2VzcyI6IHRydWUsICJjaGlzcSI6IDIwLjI5NDAxNzkxMDExMjMxMywgIm5kb2YiOiAxLCAiY2hpc3Fkb2YiOiAyMC4yOTQwMTc5MTAxMTIzMTN9LCAiZmx1eF9kaWN0IjogeyJ6dGZnIjogWzAuMCwgLTAuMDQyNjA1ODU1NzUwNDczMzYsIC0wLjE1OTc2MzYxMjQ2NDMxOTQyLCAtMC41MDU5ODM5ODUzNTUyNDg1LCAtMC4xMDU5NzkwNDU0NzgyOTA4MywgMy44MjIzMDg5NDc0ODQ1NzEsIDEyLjE5Njc1NDg2NzEwMDE0NywgMjMuODgzNTQ0OTEzNTM0MTUsIDM4LjIyMzIzMjgyMDM3NzUyLCA1NS4wMTU0NjcyNzgyNzMwOCwgNzQuMzQyNTQxNDE2MzQ5MjIsIDk2LjMyNzQ1MzkzMzE2MDA5LCAxMjEuMjMwODU1MjE3MTg2ODMsIDE0OS42NzE5NzMzNDU4Mzg4NiwgMTc4LjU4NzEzMzY3MjU1MDI3LCAyMDUuMDcyMDU1NjU4Nzg4NDYsIDIyOC4wNTUzMTYwNjg0MjQsIDI0Ny4zNTIzMjM5ODA4Nzg1NywgMjYyLjYwOTI1MTk2MzQ1NzMsIDI3My45NjAxOTExOTE4NTEsIDI4Mi4wMTYzOTE3NTE3NzMsIDI4Ni4zNzI2Mjg3NjU4NjE2MywgMjg2LjY4ODcwMTAxODEzNDQ1LCAyODMuNTU0NTY3MTE3MzQ4MSwgMjc3LjY5MjY0NTUyNDYwODgsIDI2OS40ODM1NDU5ODQ2MTMzLCAyNTkuMTAxNDEyMTkxMjIwOCwgMjQ2Ljk2OTEyOTQ0OTc2NTE1LCAyMzMuODk5MTY4NzM2OTE5MjgsIDIyMC4xMzIyNzI0NjI2MDgzNCwgMjA1LjcxNjY4NzM5MzQxMzE0LCAxOTAuNTk3MTUyMDAzOTQzODUsIDE3NS41NTM4NDE0NDAyMjYyMiwgMTYxLjE2NTk5Mzk2NDQ1NDMyLCAxNDcuNTEwNTE3ODIyNjI0MTcsIDEzNC40NDYyMjA0NjIxODY5NCwgMTIxLjg5NDI1MDE3MDAxNDM5LCAxMDkuOTA0OTI2NzQzMDg2NzIsIDk4Ljg1ODU4MjA1NTk5MDE0LCA4OC44MjIyNDkyNDQ5ODgwNiwgNzkuODU1NDY1Nzk5NzkzOTMsIDcyLjA5NTcyMzMzMTQ2NzIsIDY1LjMwMjgyMzM5NDQzNzU3LCA1OS4yNTMxMjgyMDM2MzYzNDQsIDUzLjkyNjI2NjAzMjI5NDUyLCA0OS40MTk3OTAzMTc3NTMyOCwgNDUuNjc4NTQ4MjU5MDMwNjQsIDQyLjMxMjkyMDczNTM0MDk3LCAzOS4xMzUwNDI0OTI2NjEzMSwgMzYuMTc1NTQ2NTcyMTI5NywgMzMuNDI4OTg3OTU4MTgzNDUsIDMwLjkyODcxOTcyNjIzMzE4NywgMjguNzUyMjgxODYxNTg1NjQsIDI2LjkwMjQ3ODU5MDAzMDA5NiwgMjUuMzMzOTc4MzQ4MzEyMzYsIDI0LjAxMTA1NjIzOTM4MjkzNSwgMjIuOTAzODgyMjIyNjQwNTU0LCAyMS45ODU1MjExMDM5NjA1NTgsIDIxLjE4MzYxNTc5ODI1OTE3MiwgMjAuNDUyMjYzMDY5MTMwNDY3LCAxOS43ODQ2NDgxNDA4MjM3NTUsIDE5LjE3NTU1OTc1NTQwMzQyNywgMTguNjIwMzc0ODc2NTc1MjksIDE4LjExNTA4NTAxNDE3MjMxNSwgMTcuNjU2MjcwMzk5NzY4MzIsIDE3LjI0MDMyMDg1NjEyNTk5LCAxNi44NTkxMTMwMzUwMTQ1NzcsIDE2LjQ4NjE4ODc2Njc0MTY5OCwgMTYuMTE4OTQwODAyOTc3MDQ3LCAxNS43NTg0NzM3NDM2NDc4NjIsIDE1LjQwNTc0NTcwNDAwMTI0NiwgMTUuMDYxNzIzMzM0MjQ2MDQzLCAxNC43MjczODEwMjI5Nzk4MDQsIDE0LjQwMzY5NTMxNTY1OTUzLCAxNC4wOTE2Mzk5MTExNzczNTMsIDEzLjc5MjE4MTE0NDkwNTA1OCwgMTMuNTA2MjczODY5OTM1ODA3LCAxMy4yMzQ4Njg0ODM3Njc1MzksIDEyLjk4MTU3MzMxODc4OTYyN10sICJ6dGZyIjogWzAuMCwgMC4yMDkwMjkyNzU5Mzc3MzQ5LCAwLjc4ODc0NjQ5ODgxMDcwMzksIDEuNzIyNTcyMDQ5MDAyNDYxNCwgMy42OTI5MjkzMjcwMDQxNDMyLCA4Ljc5MDcxNTY1MDg1NDgwNSwgMTcuNjMyNDU0MDIxMTYyMzM1LCAyOS4yNTAwMTgzNTEwOTI2MTcsIDQzLjA2OTYyOTQwMjAzMDcxLCA1OC44MjcyMTA3OTA5ODU1NywgNzYuNDg2NjMwNDI0MzgxMzksIDk2LjE5OTQ4Mzk4ODEwOTcyLCAxMTguMjA0MzUzMTc5NjA4NSwgMTQzLjAwNzYzODUwNjUxMDY2LCAxNjguMzEyOTYwNzk1NjcyMDQsIDE5MS45NDMxNTA1MTU0NTUwNywgMjEzLjExMjQ5MjAxODQ1MDMsIDIzMS43MDU4NjcwOTQ1MjU0MiwgMjQ3LjQ3Nzg0MDIwMzIxNTYsIDI2MC41NjYzOTUwNzY4NzIzNywgMjcxLjQ3ODE4NDY2MTkwMjcsIDI3OS44ODk5NTU4MjU4NTQ1NCwgMjg1LjUzNjM3MjA2NjA5NTA1LCAyODguNTYwNDMyMzQ4MTA3ODQsIDI4OS4yMDkxODAzODM0NjU1LCAyODcuNjUwMjExNzU1NDQxNywgMjg0LjAxMjM5MTM1MDM1NTcsIDI3OC40MDEyNzQyOTkwNjQ1LCAyNzAuNzQ3ODQ1MjM1ODM2MTcsIDI2MS4yMDkzMzY3NjQxOTcyLCAyNDkuOTY1NDI1MjA5MDkzNjQsIDIzNi45OTYwODA4NDE3NDY1NiwgMjIzLjgyMjE4ODMwMjU3MjMsIDIxMS42MDIyMzAxOTM5NTMzMiwgMjAwLjU3Njg4NDg1MTY3NjUsIDE5MC41OTQ5NzcwNDMxMDUzMywgMTgxLjU4NjIyMjIwNDI1NTUzLCAxNzMuNDE4MDQxNzE4Nzc0NzMsIDE2NS43MzA3MDQ1NzY4OTE4NCwgMTU4LjQ4MjI5NTMyNDg2NTAyLCAxNTEuNjk5NTg5MDg1MzM3ODYsIDE0NS40NTE4MTQ5NDI5MDg2NywgMTM5LjU4NDA3NTMzMzM1MzIsIDEzMy45NTkxODY4MzI4Njg3NywgMTI4LjU3MzUxMzA2ODM0MDUsIDEyMy41MTE2ODQ3MzA1NTI0LCAxMTguNzIxNzkwNTIxMTI0MTMsIDExMy44MDQ0MDc3MTY2NDgyOSwgMTA4LjYwODM1NTg2NDcxNzEzLCAxMDMuMjQxMzg2NjgzMDIxNDUsIDk3Ljc2NTI0NjgyMjU0MDA4LCA5Mi4zMzM3MTQxNjc3MjA4MSwgODcuMTk3NDkyMjAyMTk5MDQsIDgyLjQyODMxMDQ2NDc4ODMyLCA3Ny45Nzc5MjUyNzYzNzA4OCwgNzMuODEwMDcxNTY1NjgzNjMsIDY5Ljg5MTk0NDMyNjg0OSwgNjYuMTk5MzM4MTkxNjMwNCwgNjIuODE0MDY4Mjk4NzQ1Njk2LCA1OS43NzUwOTExMzk3MjA2MywgNTcuMDUxMDcyNjc2ODk3MTUsIDU0LjYxNDMxOTc2NDU2NTM0NCwgNTIuNDQxMzM3NjgzNjU1NjYsIDUwLjUxMTUyNzYyMjUyMDkyLCA0OC44MDY4ODUwNDEwMjEyMiwgNDcuMzA5MjE5ODg4NjkxNjE1LCA0NS45ODU1MTU2NDIxNDYxNywgNDQuNzQxNDAwNjkzNDQ3MDQsIDQzLjU2MjcwOTkyODYwOTE2NiwgNDIuNDQ3OTkwOTkzNDI2OTksIDQxLjM5NTQ1MDExNjg4MzUsIDQwLjQwMzQzMDc2MDk2MTY4LCAzOS40NzA0MDYxMjgyMjcyNCwgMzguNTk0OTYwNzIwNjY3NCwgMzcuNzc1Nzc3NzI1ODY5OTQsIDM3LjAxMTYzMDk2NDg1NDUyNSwgMzYuMzAxMzgwMzgzNDY1NjU2LCAzNS42NDM5NTk0MzY0NjcxNSwgMzUuMDQzODYwODA3MzQ5ODk0XSwgIm9ic2pkIjogWzI0NTkxMzYuNDcwOTcxMzA2LCAyNDU5MTM3LjQ3MDk3MTMwNiwgMjQ1OTEzOC40NzA5NzEzMDYsIDI0NTkxMzkuNDcwOTcxMzA2LCAyNDU5MTQwLjQ3MDk3MTMwNiwgMjQ1OTE0MS40NzA5NzEzMDYsIDI0NTkxNDIuNDcwOTcxMzA2LCAyNDU5MTQzLjQ3MDk3MTMwNiwgMjQ1OTE0NC40NzA5NzEzMDYsIDI0NTkxNDUuNDcwOTcxMzA2LCAyNDU5MTQ2LjQ3MDk3MTMwNiwgMjQ1OTE0Ny40NzA5NzEzMDYsIDI0NTkxNDguNDcwOTcxMzA2LCAyNDU5MTQ5LjQ3MDk3MTMwNiwgMjQ1OTE1MC40NzA5NzEzMDYsIDI0NTkxNTEuNDcwOTcxMzA2LCAyNDU5MTUyLjQ3MDk3MTMwNiwgMjQ1OTE1My40NzA5NzEzMDYsIDI0NTkxNTQuNDcwOTcxMzA2LCAyNDU5MTU1LjQ3MDk3MTMwNiwgMjQ1OTE1Ni40NzA5NzEzMDYsIDI0NTkxNTcuNDcwOTcxMzA2LCAyNDU5MTU4LjQ3MDk3MTMwNiwgMjQ1OTE1OS40NzA5NzEzMDYsIDI0NTkxNjAuNDcwOTcxMzA2LCAyNDU5MTYxLjQ3MDk3MTMwNiwgMjQ1OTE2Mi40NzA5NzEzMDYsIDI0NTkxNjMuNDcwOTcxMzA2LCAyNDU5MTY0LjQ3MDk3MTMwNiwgMjQ1OTE2NS40NzA5NzEzMDYsIDI0NTkxNjYuNDcwOTcxMzA2LCAyNDU5MTY3LjQ3MDk3MTMwNiwgMjQ1OTE2OC40NzA5NzEzMDYsIDI0NTkxNjkuNDcwOTcxMzA2LCAyNDU5MTcwLjQ3MDk3MTMwNiwgMjQ1OTE3MS40NzA5NzEzMDYsIDI0NTkxNzIuNDcwOTcxMzA2LCAyNDU5MTczLjQ3MDk3MTMwNiwgMjQ1OTE3NC40NzA5NzEzMDYsIDI0NTkxNzUuNDcwOTcxMzA2LCAyNDU5MTc2LjQ3MDk3MTMwNiwgMjQ1OTE3Ny40NzA5NzEzMDYsIDI0NTkxNzguNDcwOTcxMzA2LCAyNDU5MTc5LjQ3MDk3MTMwNiwgMjQ1OTE4MC40NzA5NzEzMDYsIDI0NTkxODEuNDcwOTcxMzA2LCAyNDU5MTgyLjQ3MDk3MTMwNiwgMjQ1OTE4My40NzA5NzEzMDYsIDI0NTkxODQuNDcwOTcxMzA2LCAyNDU5MTg1LjQ3MDk3MTMwNiwgMjQ1OTE4Ni40NzA5NzEzMDYsIDI0NTkxODcuNDcwOTcxMzA2LCAyNDU5MTg4LjQ3MDk3MTMwNiwgMjQ1OTE4OS40NzA5NzEzMDYsIDI0NTkxOTAuNDcwOTcxMzA2LCAyNDU5MTkxLjQ3MDk3MTMwNiwgMjQ1OTE5Mi40NzA5NzEzMDYsIDI0NTkxOTMuNDcwOTcxMzA2LCAyNDU5MTk0LjQ3MDk3MTMwNiwgMjQ1OTE5NS40NzA5NzEzMDYsIDI0NTkxOTYuNDcwOTcxMzA2LCAyNDU5MTk3LjQ3MDk3MTMwNiwgMjQ1OTE5OC40NzA5NzEzMDYsIDI0NTkxOTkuNDcwOTcxMzA2LCAyNDU5MjAwLjQ3MDk3MTMwNiwgMjQ1OTIwMS40NzA5NzEzMDYsIDI0NTkyMDIuNDcwOTcxMzA2LCAyNDU5MjAzLjQ3MDk3MTMwNiwgMjQ1OTIwNC40NzA5NzEzMDYsIDI0NTkyMDUuNDcwOTcxMzA2LCAyNDU5MjA2LjQ3MDk3MTMwNiwgMjQ1OTIwNy40NzA5NzEzMDYsIDI0NTkyMDguNDcwOTcxMzA2LCAyNDU5MjA5LjQ3MDk3MTMwNiwgMjQ1OTIxMC40NzA5NzEzMDYsIDI0NTkyMTEuNDcwOTcxMzA2LCAyNDU5MjEyLjQ3MDk3MTMwNiwgMjQ1OTIxMy40NzA5NzEzMDYsIDI0NTkyMTQuNDcwOTcxMzA2XX19fQ==",
                'name': "ampel_test.json",
            },
        },
        token=super_admin_token,
    )
    assert status == 200
    assert data['status'] == 'success'
def test_fetch_all_comments_on_obj(comment_token, public_source):
    """Post a fresh comment on a source, then verify it appears when
    listing all comments on that source."""
    posted_text = str(uuid.uuid4())

    status, data = api(
        'POST',
        f'sources/{public_source.id}/comments',
        data={'text': posted_text},
        token=comment_token,
    )
    assert status == 200

    status, data = api(
        'GET', f'sources/{public_source.id}/comments', token=comment_token
    )
    assert status == 200

    # The freshly posted comment must be among the fetched ones.
    fetched_texts = [comment['text'] for comment in data['data']]
    assert posted_text in fetched_texts
| 95.553125
| 7,322
| 0.886091
| 1,026
| 30,577
| 26.164717
| 0.098441
| 0.016986
| 0.015124
| 0.021606
| 0.154628
| 0.152505
| 0.147216
| 0.143602
| 0.141665
| 0.13943
| 0
| 0.10585
| 0.075874
| 30,577
| 319
| 7,323
| 95.852665
| 0.84418
| 0.011774
| 0
| 0.69434
| 0
| 0
| 0.801344
| 0.771186
| 0
| 1
| 0
| 0
| 0.184906
| 1
| 0.033962
| false
| 0
| 0.007547
| 0
| 0.041509
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b918284784f063690690d1ac59c10a5590454d48
| 170
|
py
|
Python
|
OOPS/Modules/Operations.py
|
TausifAnsari/PyHub
|
f6c949dc6a3974f57d7d146708443d0ceeb4418f
|
[
"MIT"
] | 1
|
2020-09-30T19:31:20.000Z
|
2020-09-30T19:31:20.000Z
|
OOPS/Modules/Operations.py
|
TanviSutar/PyHub
|
6281e9f515674fb51f0d0862c26ec18020fa7d83
|
[
"MIT"
] | null | null | null |
OOPS/Modules/Operations.py
|
TanviSutar/PyHub
|
6281e9f515674fb51f0d0862c26ec18020fa7d83
|
[
"MIT"
] | null | null | null |
def add(a, b):
    """Return the sum of *a* and *b*."""
    total = a + b
    return total
def subtraction(a, b):
    """Return *a* minus *b*."""
    difference = a - b
    return difference
def multiply(a, b):
    """Return the product of *a* and *b*."""
    product = a * b
    return product
def division(a, b):
    """Return *a* divided by *b* (true division).

    Raises ZeroDivisionError when *b* is zero, like the ``/`` operator.
    """
    quotient = a / b
    return quotient
def modulas(a, b):
    """Return *a* modulo *b* (remainder with the sign of *b*)."""
    remainder = a % b
    return remainder
| 12.142857
| 21
| 0.6
| 35
| 170
| 2.914286
| 0.257143
| 0.196078
| 0.392157
| 0.441176
| 0.607843
| 0.509804
| 0
| 0
| 0
| 0
| 0
| 0
| 0.252941
| 170
| 14
| 22
| 12.142857
| 0.80315
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
b91bd599a7a0caca033927acc0b2ad2bb8b2c5f3
| 117
|
py
|
Python
|
server/app/wide/__init__.py
|
Rennty/x-proba
|
939b71296d28b7a0a966c6022ed10dfb1612c6d6
|
[
"MIT"
] | null | null | null |
server/app/wide/__init__.py
|
Rennty/x-proba
|
939b71296d28b7a0a966c6022ed10dfb1612c6d6
|
[
"MIT"
] | null | null | null |
server/app/wide/__init__.py
|
Rennty/x-proba
|
939b71296d28b7a0a966c6022ed10dfb1612c6d6
|
[
"MIT"
] | 2
|
2019-04-17T23:25:40.000Z
|
2019-04-23T14:02:39.000Z
|
# -*- coding: utf-8 -*-
from flask import Blueprint

# Blueprint for the "wide" feature area of the application.
bp = Blueprint('wide', __name__)

# Imported after ``bp`` is created — the usual Flask pattern to avoid a
# circular import (the routes module presumably attaches views to ``bp``).
from app.wide import routes
| 13
| 32
| 0.683761
| 16
| 117
| 4.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010417
| 0.179487
| 117
| 8
| 33
| 14.625
| 0.78125
| 0.179487
| 0
| 0
| 0
| 0
| 0.042553
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 5
|
b91d27b5680a70cc80ac089f5caac4ec4d90390f
| 65
|
py
|
Python
|
lisp_shell/__init__.py
|
lisp/lisp_kernel
|
203d77e497111ada93b185ec4c92f38b10c73f13
|
[
"BSD-3-Clause"
] | null | null | null |
lisp_shell/__init__.py
|
lisp/lisp_kernel
|
203d77e497111ada93b185ec4c92f38b10c73f13
|
[
"BSD-3-Clause"
] | null | null | null |
lisp_shell/__init__.py
|
lisp/lisp_kernel
|
203d77e497111ada93b185ec4c92f38b10c73f13
|
[
"BSD-3-Clause"
] | null | null | null |
"""A lisp kernel for Jupyter"""
from .kernel import __version__
| 16.25
| 31
| 0.738462
| 9
| 65
| 4.888889
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 65
| 3
| 32
| 21.666667
| 0.8
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b966a77f8404b7309d643a91d338b5c055c46450
| 151
|
py
|
Python
|
odherogrid/__init__.py
|
PederHA/odherogrid
|
3e9a5eb0949dc677961e86bf52c838a066f4b5fb
|
[
"MIT"
] | 5
|
2020-08-01T19:45:51.000Z
|
2021-05-17T20:40:42.000Z
|
odherogrid/__init__.py
|
PederHA/odherogrid
|
3e9a5eb0949dc677961e86bf52c838a066f4b5fb
|
[
"MIT"
] | 3
|
2020-08-02T00:00:39.000Z
|
2021-06-02T00:24:56.000Z
|
odherogrid/__init__.py
|
PederHA/odherogrid
|
3e9a5eb0949dc677961e86bf52c838a066f4b5fb
|
[
"MIT"
] | 2
|
2020-08-17T18:15:08.000Z
|
2021-04-02T16:54:20.000Z
|
# Package version (kept in sync with the release tag).
__version__ = '0.3.1'
# Aggregate the public names of every submodule at package level, so that
# ``import odherogrid`` exposes the full CLI/config/API surface directly.
from .cli import *
from .config import *
from .enums import *
from .odapi import *
from .odhg import *
from .resources import *
| 16.777778
| 24
| 0.701987
| 22
| 151
| 4.636364
| 0.545455
| 0.490196
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02439
| 0.18543
| 151
| 8
| 25
| 18.875
| 0.804878
| 0
| 0
| 0
| 0
| 0
| 0.033113
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.857143
| 0
| 0.857143
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
b99b339937fc1050dd7be7bdc103485c0c79c801
| 6,261
|
py
|
Python
|
packages/augur-core/tests/libraries/math/test_safeMathUint256.py
|
zackster/augur
|
74e9ab55477e788c1f2f115d19a18368a443ec87
|
[
"MIT"
] | null | null | null |
packages/augur-core/tests/libraries/math/test_safeMathUint256.py
|
zackster/augur
|
74e9ab55477e788c1f2f115d19a18368a443ec87
|
[
"MIT"
] | null | null | null |
packages/augur-core/tests/libraries/math/test_safeMathUint256.py
|
zackster/augur
|
74e9ab55477e788c1f2f115d19a18368a443ec87
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from ethereum.tools import tester
from ethereum.tools.tester import TransactionFailed
from pytest import fixture, mark, raises
@fixture(scope='session')
def testerSnapshot(sessionFixture):
    """Upload the SafeMathUint256 helper contract once per session and
    snapshot the chain so every test starts from a clean state."""
    sessionFixture.upload('solidity_test_helpers/SafeMathUint256Tester.sol')
    return sessionFixture.createSnapshot()

@fixture
def testerContractsFixture(sessionFixture, testerSnapshot):
    """Reset the chain to the clean snapshot before each test."""
    sessionFixture.resetToSnapshot(testerSnapshot)
    return sessionFixture

def _tester(testerContractsFixture):
    """Upload the tester contract into the fixture and return its instance.

    Factored out: this two-line preamble was copy-pasted into every test.
    """
    testerContractsFixture.uploadAndAddToAugur("../tests/solidity_test_helpers/SafeMathUint256Tester.sol")
    return testerContractsFixture.contracts['SafeMathUint256Tester']

def _check(fn, args, expectedResult):
    """Assert ``fn(*args) == expectedResult``.

    The sentinel string "TransactionFailed" in place of a value means the
    call is expected to revert.
    """
    if expectedResult == "TransactionFailed":
        with raises(TransactionFailed):
            fn(*args)
    else:
        assert fn(*args) == expectedResult

@mark.parametrize('a, b, expectedResult', [
    ((2**(256) - 1), (2**(256) - 1), "TransactionFailed"),
    (1, 0, 0),
    (1, 1, 1),
    (1, 2, 2)
])
def test_mul(a, b, expectedResult, testerContractsFixture):
    _check(_tester(testerContractsFixture).mul, (a, b), expectedResult)

@mark.parametrize('a, b, expectedResult', [
    ((2**(256) - 1), (2**(256) - 1), 1),
    (1, 0, "TransactionFailed"),
    (1, 1, 1)
])
def test_div(a, b, expectedResult, testerContractsFixture):
    _check(_tester(testerContractsFixture).div, (a, b), expectedResult)

@mark.parametrize('a, b, expectedResult', [
    ((2**(256) - 1), (2**(256) - 1), 0),
    (1, 0, 1),
    (1, 1, 0)
])
def test_sub(a, b, expectedResult, testerContractsFixture):
    _check(_tester(testerContractsFixture).sub, (a, b), expectedResult)

@mark.parametrize('a, b, expectedResult', [
    ((2**(256) - 1), (2**(256) - 1), "TransactionFailed"),
    (1, 0, 1),
    (1, 1, 2)
])
def test_add(a, b, expectedResult, testerContractsFixture):
    _check(_tester(testerContractsFixture).add, (a, b), expectedResult)

@mark.parametrize('a, b, expectedResult', [
    (0, 0, 0),
    (0, 1, 0),
    (1, 0, 0),
    (1, 1, 1),
    (1, 2, 1),
    (2, 1, 1),
])
def test_min(a, b, expectedResult, testerContractsFixture):
    _check(_tester(testerContractsFixture).min, (a, b), expectedResult)

@mark.parametrize('a, b, expectedResult', [
    (0, 0, 0),
    (0, 1, 1),
    (1, 0, 1),
    (1, 1, 1),
    (1, 2, 2),
    (2, 1, 2),
])
def test_max(a, b, expectedResult, testerContractsFixture):
    _check(_tester(testerContractsFixture).max, (a, b), expectedResult)

def test_getUint256Min(testerContractsFixture):
    # Uses the contract uploaded by the session snapshot; no re-upload needed.
    safeMathUint256Tester = testerContractsFixture.contracts['SafeMathUint256Tester']
    assert safeMathUint256Tester.getUint256Min() == 0

def test_getUint256Max(testerContractsFixture):
    safeMathUint256Tester = testerContractsFixture.contracts['SafeMathUint256Tester']
    assert safeMathUint256Tester.getUint256Max() == (2**256)-1

@mark.parametrize('a, b, base, expectedResult', [
    ((2**(256) - 1), (2**(256) - 1), 10**18, "TransactionFailed"),
    (10**18, 0, 10**18, 0),
    (10**18, 1, 10**18, 1),
    (10**18, 2, 10**18, 2)
])
def test_fxpMul(a, b, base, expectedResult, testerContractsFixture):
    _check(_tester(testerContractsFixture).fxpMul, (a, b, base), expectedResult)

@mark.parametrize('a, b, base, expectedResult', [
    ((2**(256) - 1), (2**(256) - 1), 10**18, "TransactionFailed"),
    (1, 0, 10**18, "TransactionFailed"),
    (1, 1, 10**18, 10**18)
])
def test_fxpDiv(a, b, base, expectedResult, testerContractsFixture):
    _check(_tester(testerContractsFixture).fxpDiv, (a, b, base), expectedResult)

@mark.parametrize('a, b, expectedResult', [
    (1, 1, True),
    (2, 1, True),
    (4, 2, True),
    (9, 3, True),
    (4, 3, False),
    (8, 5, False),
    (2 * 10 ** 18, 10 ** 18, True),
    (2 * 10 ** 18, 2, True),
    (1, 0, "TransactionFailed")
])
def test_isMultipleOf(a, b, expectedResult, testerContractsFixture):
    _check(_tester(testerContractsFixture).isMultipleOf, (a, b), expectedResult)
| 40.134615
| 106
| 0.701166
| 598
| 6,261
| 7.289298
| 0.108696
| 0.0156
| 0.077082
| 0.184217
| 0.827254
| 0.76279
| 0.740996
| 0.68502
| 0.668731
| 0.668731
| 0
| 0.071212
| 0.15892
| 6,261
| 155
| 107
| 40.393548
| 0.756551
| 0.003194
| 0
| 0.521429
| 0
| 0
| 0.195353
| 0.125321
| 0
| 0
| 0
| 0
| 0.078571
| 1
| 0.092857
| false
| 0
| 0.021429
| 0
| 0.128571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
b9b9d3e10b5803d4c514d9e30af495e6649f4fbd
| 754
|
py
|
Python
|
python/cudf/cudf/core/table.py
|
jrhemstad/pygdf
|
8d7bf34ff045fbe1760c90ef825e6719bab1ff80
|
[
"Apache-2.0"
] | 1
|
2020-01-14T01:44:35.000Z
|
2020-01-14T01:44:35.000Z
|
python/cudf/cudf/core/table.py
|
CZZLEGEND/cudf
|
5d2465d6738d00628673fffdc1fac51fad7ef9a7
|
[
"Apache-2.0"
] | null | null | null |
python/cudf/cudf/core/table.py
|
CZZLEGEND/cudf
|
5d2465d6738d00628673fffdc1fac51fad7ef9a7
|
[
"Apache-2.0"
] | null | null | null |
from cudf._libxx.table import _Table
class Table(_Table):
    """Thin wrapper around ``_Table`` that adds column-wise unary math ops."""

    def _unaryop(self, op):
        """Return a copy of the table with *op* applied to every column."""
        out = self.copy()
        for name in out._data:
            out._data[name] = out._data[name].unary_operator(op)
        return out

    def sin(self):
        """Element-wise sine of every column."""
        return self._unaryop("sin")

    def cos(self):
        """Element-wise cosine of every column."""
        return self._unaryop("cos")

    def tan(self):
        """Element-wise tangent of every column."""
        return self._unaryop("tan")

    def asin(self):
        """Element-wise arcsine of every column."""
        return self._unaryop("asin")

    def acos(self):
        """Element-wise arccosine of every column."""
        return self._unaryop("acos")

    def atan(self):
        """Element-wise arctangent of every column."""
        return self._unaryop("atan")

    def exp(self):
        """Element-wise exponential of every column."""
        return self._unaryop("exp")

    def log(self):
        """Element-wise natural logarithm of every column."""
        return self._unaryop("log")

    def sqrt(self):
        """Element-wise square root of every column."""
        return self._unaryop("sqrt")
| 20.378378
| 55
| 0.582228
| 95
| 754
| 4.452632
| 0.315789
| 0.212766
| 0.297872
| 0.446809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.287798
| 754
| 36
| 56
| 20.944444
| 0.78771
| 0
| 0
| 0
| 0
| 0
| 0.041114
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.04
| 0.36
| 0.88
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
b9de65b525c9b3a0039c3dbf8dc3e81472bba265
| 21,133
|
py
|
Python
|
ostap/fitting/tests/test_fitting_models2_2D.py
|
Pro100Tema/ostap
|
1765304fce43714e1f51dfe03be0daa5aa5d490f
|
[
"BSD-3-Clause"
] | null | null | null |
ostap/fitting/tests/test_fitting_models2_2D.py
|
Pro100Tema/ostap
|
1765304fce43714e1f51dfe03be0daa5aa5d490f
|
[
"BSD-3-Clause"
] | null | null | null |
ostap/fitting/tests/test_fitting_models2_2D.py
|
Pro100Tema/ostap
|
1765304fce43714e1f51dfe03be0daa5aa5d490f
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# =============================================================================
# Copyright (c) Ostap developers.
# =============================================================================
# @file ostap/fitting/tests/test_fitting_models2_2D.py
# Test module for ostap/fitting/models_2d.py
# - It tests various 2D-non-factrorizeable models
# =============================================================================
""" Test module for ostap/fitting/models_2d.py
- It tests various 2D-non-factrorizeable models
"""
# =============================================================================
from __future__ import print_function
# =============================================================================
import ROOT, random
import ostap.fitting.roofit
import ostap.fitting.models as Models
from ostap.core.core import Ostap, std, VE, dsID
from ostap.logger.utils import rooSilent
import ostap.io.zipshelve as DBASE
from ostap.utils.timing import timing
from builtins import range
# =============================================================================
# logging
# =============================================================================
from ostap.logger.logger import getLogger
if '__main__' == __name__ or '__builtin__' == __name__ :
    logger = getLogger ( 'test_fitting_models2_2D' )
else :
    logger = getLogger ( __name__ )
# =============================================================================
## make simple test mass
m_x = ROOT.RooRealVar('mass_x', 'Some test mass(X)', 3, 3.2)
m_y = ROOT.RooRealVar('mass_y', 'Some test mass(Y)', 3, 3.2)

## book very simple data set
varset = ROOT.RooArgSet(m_x, m_y)
dataset = ROOT.RooDataSet(dsID(), 'Test Data set-1', varset)

# true signal position/resolution used to generate the toy data
m = VE(3.100, 0.015**2)

# event yields for the four (signal|background) x (signal|background) species
N_ss = 5000
N_sb = 500
N_bs = 500
N_bb = 1000

random.seed(0)

## fill it : N_ss events Gauss * Gauss
for i in range(N_ss):
    m_x.value = m.gauss()
    m_y.value = m.gauss()
    dataset.add(varset)

## fill it : N_sb events Gauss * const
for i in range(N_sb):
    m_x.value = m.gauss()
    m_y.value = random.uniform(*m_y.minmax())
    dataset.add(varset)

## fill it : N_bs events const * Gauss
for i in range(N_bs):
    m_x.value = random.uniform(*m_x.minmax())
    m_y.value = m.gauss()
    dataset.add(varset)

## fill it : N_bb events const * const
for i in range(N_bb):
    m_x.value = random.uniform(*m_x.minmax())
    m_y.value = random.uniform(*m_y.minmax())
    dataset.add(varset)

logger.info('Dataset:%s ' % dataset)

# every fitted model is kept here so test_db can check serializability
models = set()

# =============================================================================
signal1  = Models.Gauss_pdf('Gx', xvar=m_x)
signal2  = Models.Gauss_pdf('Gy', xvar=m_y)
signal2s = signal1.clone(name='GyS', xvar=m_y)

signal1.mean  = m.value()
signal1.sigma = m.error()
signal2.mean  = m.value()
signal2.sigma = m.error()

# =============================================================================
def _fit_and_report(model, half_crossterms=False):
    """Fit *model* to the shared dataset and log the fitted yields.

    The fit runs twice: first with fixed mass and sigma, then again with
    them released.  When ``half_crossterms`` is True the SB/BS yields are
    halved in the report (symmetric models share a single cross-term yield).
    The model is finally registered in ``models`` for the DB round-trip test.

    Factored out: this sequence was duplicated verbatim in every test.
    """
    with rooSilent():
        result, frame = model.fitTo(dataset)
        model.signal_x.sigma.release()
        model.signal_y.sigma.release()
        model.signal_x.mean.release()
        model.signal_y.mean.release()
        result, frame = model.fitTo(dataset)

    if 0 != result.status() or 3 != result.covQual():
        logger.warning('Fit is not perfect MIGRAD=%d QUAL=%d '
                       % (result.status(), result.covQual()))
        print(result)
    else:
        logger.info('S1xS2 : %20s' % result(model.SS)[0])
        if half_crossterms:
            logger.info('S1xB2 : %20s' % (result(model.SB)[0] / 2))
            logger.info('B1xS2 : %20s' % (result(model.BS)[0] / 2))
        else:
            logger.info('S1xB2 : %20s' % result(model.SB)[0])
            logger.info('B1xS2 : %20s' % result(model.BS)[0])
        logger.info('B1xB2 : %20s' % result(model.BB)[0])

    models.add(model)

# =============================================================================
## gauss as signal, const as background
# =============================================================================
def test_const():
    """Factorized model: (Gauss + const) x (Gauss + const)."""
    logger.info('Simplest (factorized) fit model: ( Gauss + const ) x ( Gauss + const ) ')
    model = Models.Fit2D(
        signal_x=signal1,
        signal_y=signal2s,
    )
    _fit_and_report(model)

# =============================================================================
## gauss as signal, first order polynomial as background
# =============================================================================
def test_p2xp2():
    """Factorized model: (Gauss + P1) (x) (Gauss + P1)."""
    logger.info('Simple (factorized) fit model: ( Gauss + P1 ) (x) ( Gauss + P1 ) ')
    model = Models.Fit2D(
        suffix='_2',
        signal_x=signal1,
        signal_y=signal2s,
        bkg_1x=-1,
        bkg_1y=-1,
        bkg_2x=-1,
        bkg_2y=-1,
    )
    _fit_and_report(model)

# =============================================================================
## gauss as signal, 1st order polynomial as background + non-factorizeable BB
# =============================================================================
def test_p1xp1_BB():
    """Non-factorized BB component via a positive 2D polynomial."""
    logger.info('Simplest non-factorized fit model: ( Gauss + P1 ) (x) ( Gauss + P1 ) + BB')
    model = Models.Fit2D(
        suffix='_3',
        signal_x=signal1,
        signal_y=signal2s,
        bkg_1x=-1,
        bkg_1y=-1,
        bkg_2D=Models.PolyPos2D_pdf('P2D', m_x, m_y, nx=2, ny=2)
    )
    _fit_and_report(model)

# =============================================================================
## gauss as signal, 1st order polynomial as background + symmetric BB
# =============================================================================
def test_p1xp1_BBs():
    """Non-factorized symmetric BB component."""
    logger.info('Non-factorized symmetric background fit model: ( Gauss + P1 ) (x) ( Gauss + P1 ) + BBsym')
    model = Models.Fit2D(
        suffix='_4',
        signal_x=signal1,
        signal_y=signal2s,
        bkg_1x=-1,
        bkg_1y=-1,
        bkg_2D=Models.PolyPos2Dsym_pdf('P2Ds', m_x, m_y, n=2)
    )
    _fit_and_report(model)

# =============================================================================
## symmetrised model with shared SB/BS yield
# =============================================================================
def test_p1xp1_BBss():
    """Symmetrised model: SB and BS share a single yield variable."""
    logger.info('Symmetrised fit model with non-factorized symmetric background: ( Gauss + P1 ) (x) ( Gauss + P1 ) + BBsym')
    sb = ROOT.RooRealVar('sb', 'SB', 2500, 0, 10000)
    model = Models.Fit2D(
        suffix='_5',
        signal_x=signal1,
        signal_y=signal2s,
        bkg_1x=-1,
        bkg_2D=Models.PolyPos2Dsym_pdf('P2Ds', m_x, m_y, n=1),
        sb=sb,
        bs=sb
    )
    # seed the yields near their generated values before fitting
    model.SS = N_ss
    model.BB = N_bb
    model.SB = 2500
    _fit_and_report(model)

# =============================================================================
## fully symmetric fit model
# =============================================================================
def test_p1xp1_BBsym():
    """Symmetric model (Fit2DSym); reported cross-terms are halved."""
    logger.info('Symmetric non-factorized fit model: ( Gauss + P1 ) (x) ( Gauss + P1 ) + BBsym')
    # NOTE(review): this variable is created but never passed to the model,
    # matching the original code — kept for behavioural parity.
    sb = ROOT.RooRealVar('sb', 'SB', 0, 10000)
    model = Models.Fit2DSym(
        suffix='_6',
        signal_x=signal1,
        signal_y=signal2s,
        bkg_1x=-1,
        bkg_2D=Models.PolyPos2Dsym_pdf('P2D5', m_x, m_y, n=2),
    )
    _fit_and_report(model, half_crossterms=True)

# =============================================================================
## gauss as signal, expo times 1st order polynomial as background
# =============================================================================
def test_pbxpb_BB():
    """Non-factorized BB via expo*P1 squared."""
    logger.info('Non-factorizeable background component: ( Gauss + expo*P1 ) (x) ( Gauss + expo*P1 ) + (expo*P1)**2')
    model = Models.Fit2D(
        suffix='_7',
        signal_x=signal1,
        signal_y=signal2s,
        bkg_1x=1,
        bkg_1y=1,
        bkg_2D=Models.ExpoPol2D_pdf('P2D7', m_x, m_y, nx=1, ny=1)
    )
    # fix the exponential slopes so the 1D backgrounds stay polynomial
    model.bkg_1x.tau.fix(0)
    model.bkg_1y.tau.fix(0)
    _fit_and_report(model)

# =============================================================================
## gauss as signal, expo times 1st order polynomial as background (symmetric BB)
# =============================================================================
def test_pbxpb_BBs():
    """Non-factorized symmetric BB via Sym(expo*P1) squared."""
    logger.info('Non-factorizeable background component: ( Gauss + expo*P1 ) (x) ( Gauss + expo*P1 ) + Sym(expo*P1)**2')
    model = Models.Fit2D(
        suffix='_8',
        signal_x=signal1,
        signal_y=signal2s,
        bkg_1x=1,
        bkg_1y=1,
        bkg_2D=Models.ExpoPol2Dsym_pdf('P2D8', m_x, m_y, n=1)
    )
    model.bkg_1x.tau.fix(0)
    model.bkg_1y.tau.fix(0)
    _fit_and_report(model)

# =============================================================================
## symmetric model with Sym(expo*P1)**2 background
# =============================================================================
def test_pbxpb_BBsym():
    """Symmetric model with non-factorizeable expo*P1 background."""
    logger.info('Symmetric fit model with non-factorizeable background component: ( Gauss + P1 ) (x) ( Gauss + P1 ) + Sym(expo*P1)**2')
    model = Models.Fit2DSym(
        suffix='_9',
        signal_x=signal1,
        signal_y=signal2s,
        bkg_1x=-1,
        bkg_2D=Models.ExpoPol2Dsym_pdf('P2D9', m_x, m_y, n=1)
    )
    _fit_and_report(model, half_crossterms=True)

# =============================================================================
## phase-space times polynomial backgrounds
# =============================================================================
def test_psxps_BBs():
    """Non-factorized symmetric background built from a phase-space factor."""
    logger.info('Non-factorizeable symmetric background component: ( Gauss + P1 ) (x) ( Gauss + P1 ) + (PS*P1)**2')
    PS = Ostap.Math.PhaseSpaceNL(1.0, 5.0, 2, 5)
    model = Models.Fit2D(
        suffix='_11',
        signal_x=signal1,
        signal_y=signal2s,
        bkg_1x=-1,
        bkg_1y=-1,
        bkg_2D=Models.PSPol2Dsym_pdf('P2D11', m_x, m_y, ps=PS, n=1)
    )
    _fit_and_report(model)

def test_psxps_BBsym():
    """Symmetric model with the phase-space background."""
    logger.info('Simmetric fit model with non-factorizeable background component: ( Gauss + P1 ) (x) ( Gauss + P1 ) + (PS*P1)**2')
    PS = Ostap.Math.PhaseSpaceNL(1.0, 5.0, 2, 5)
    model = Models.Fit2DSym(
        suffix='_12',
        signal_x=signal1,
        signal_y=signal2s,
        bkg_1x=-1,
        bkg_2D=Models.PSPol2Dsym_pdf('P2D12', m_x, m_y, ps=PS, n=1)
    )
    _fit_and_report(model, half_crossterms=True)

# =============================================================================
## check that everything is serializable
# =============================================================================
def test_db():
    """Round-trip all created objects through a temporary zipshelve DB."""
    logger.info('Saving all objects into DBASE')
    with timing(name='Save everything to DBASE'), DBASE.tmpdb() as db:
        db['m_x']     = m_x
        db['m_y']     = m_y
        db['vars']    = varset
        db['models']  = models
        db['dataset'] = dataset
        db.ls()

# =============================================================================
if '__main__' == __name__:

    from ostap.utils.timing import timing

    with timing('test_const'):        test_const()
    with timing('test_p2xp2'):        test_p2xp2()
    with timing('test_p1xp1_BB'):     test_p1xp1_BB()
    # FIX: test_p1xp1_BBs was defined but never invoked here
    with timing('test_p1xp1_BBs'):    test_p1xp1_BBs()
    with timing('test_p1xp1_BBss'):   test_p1xp1_BBss()
    with timing('test_p1xp1_BBsym'):  test_p1xp1_BBsym()
    with timing('test_pbxpb_BB'):     test_pbxpb_BB()
    with timing('test_pbxpb_BBs'):    test_pbxpb_BBs()
    with timing('test_pbxpb_BBsym'):  test_pbxpb_BBsym()
    with timing('test_psxps_BBs'):    test_psxps_BBs()
    with timing('test_psxps_BBsym'):  test_psxps_BBsym()

    ## check finally that everything is serializeable:
    with timing('save to DB'):        test_db()

# =============================================================================
# The END
# =============================================================================
| 38.918969
| 137
| 0.468604
| 2,262
| 21,133
| 4.258621
| 0.094164
| 0.059172
| 0.063947
| 0.04796
| 0.770165
| 0.765597
| 0.745562
| 0.726876
| 0.717845
| 0.711928
| 0
| 0.036443
| 0.283254
| 21,133
| 542
| 138
| 38.990775
| 0.599525
| 0.209388
| 0
| 0.687166
| 0
| 0.024064
| 0.14259
| 0.001386
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032086
| false
| 0
| 0.029412
| 0
| 0.061497
| 0.032086
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6a0c496671139b16e9718440298b53fd6f8d3291
| 654
|
py
|
Python
|
10. FunctionsClasses/helperfunctions/collectionitem.py
|
michaelbwalker/Python-Data-Cleaning-Cookbook
|
2da8d183e40d38caebba58f1503c3c252855e38d
|
[
"MIT"
] | 1
|
2021-05-31T03:17:00.000Z
|
2021-05-31T03:17:00.000Z
|
10. FunctionsClasses/helperfunctions/collectionitem.py
|
michaelbwalker/Python-Data-Cleaning-Cookbook
|
2da8d183e40d38caebba58f1503c3c252855e38d
|
[
"MIT"
] | null | null | null |
10. FunctionsClasses/helperfunctions/collectionitem.py
|
michaelbwalker/Python-Data-Cleaning-Cookbook
|
2da8d183e40d38caebba58f1503c3c252855e38d
|
[
"MIT"
] | null | null | null |
class Collectionitem:
    """Wrapper around one collection-item dict.

    The wrapped dict is expected to contain a ``creators`` list of dicts
    and a ``citations`` list — assumed from the accessors below; confirm
    against the data source.
    """

    # Class-wide count of instances created so far.
    collectionitemcnt = 0

    def __init__(self, colldict):
        self.colldict = colldict
        Collectionitem.collectionitemcnt += 1

    def birthyearcreator1(self):
        """Return the first creator's birth year, or "Unknown" if the key is absent."""
        # dict.get replaces the original key-membership test (LBYL -> one
        # lookup); behavior is unchanged, including returning a stored
        # None value as-is.
        return self.colldict['creators'][0].get('birth_year', "Unknown")

    def birthyearsall(self):
        """Return the birth year (or None) for every creator, in order."""
        return [creator.get('birth_year') for creator in self.colldict['creators']]

    def ncreators(self):
        """Return the number of creators."""
        return len(self.colldict['creators'])

    def ncitations(self):
        """Return the number of citations."""
        return len(self.colldict['citations'])
| 20.4375
| 57
| 0.636086
| 69
| 654
| 5.927536
| 0.405797
| 0.205379
| 0.195599
| 0.107579
| 0.122249
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010101
| 0.243119
| 654
| 31
| 58
| 21.096774
| 0.816162
| 0
| 0
| 0
| 0
| 0
| 0.126623
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.263158
| false
| 0
| 0
| 0.105263
| 0.578947
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
6a494cbe792a8868d322b884ea8720674c7474af
| 62
|
py
|
Python
|
scripts/python-shell.py
|
DEMON1A/EasyShell
|
d37589b616f844c4d6f01ad594642033463f346b
|
[
"MIT"
] | 3
|
2021-02-02T15:07:27.000Z
|
2021-05-21T10:02:08.000Z
|
scripts/python-shell.py
|
DEMON1A/EasyShell
|
d37589b616f844c4d6f01ad594642033463f346b
|
[
"MIT"
] | 7
|
2021-02-03T11:14:04.000Z
|
2021-02-18T19:39:25.000Z
|
scripts/python-shell.py
|
DEMON1A/EasyShell
|
d37589b616f844c4d6f01ad594642033463f346b
|
[
"MIT"
] | null | null | null |
from os import system


def Run(Input):
    """Open an interactive Python shell.

    ``Input`` is accepted (presumably the EasyShell plugin dispatcher
    passes the raw command line) but deliberately ignored — confirm
    against the dispatcher.

    NOTE(review): ``os.system`` runs through the shell; fine for this
    fixed command, but never interpolate untrusted text into it.
    """
    system("python")
| 15.5
| 22
| 0.66129
| 9
| 62
| 4.555556
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.225806
| 62
| 4
| 23
| 15.5
| 0.854167
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6a499ac680aaf39612e4ee41512c8a88ec405cc2
| 78
|
py
|
Python
|
examples/5.templates/v1.2/hello.py
|
wild-devops/pwshake
|
c514f0f8dddd39ed37466e444de5f2dff574e89c
|
[
"MIT"
] | null | null | null |
examples/5.templates/v1.2/hello.py
|
wild-devops/pwshake
|
c514f0f8dddd39ed37466e444de5f2dff574e89c
|
[
"MIT"
] | null | null | null |
examples/5.templates/v1.2/hello.py
|
wild-devops/pwshake
|
c514f0f8dddd39ed37466e444de5f2dff574e89c
|
[
"MIT"
] | null | null | null |
import sys

# Greet the name given as the first command-line argument on stdout,
# flushing immediately so the output is visible to a capturing parent.
greeting = f"Hello {sys.argv[1]}!"
print(greeting, file=sys.stdout, flush=True)
| 26
| 65
| 0.628205
| 12
| 78
| 4.083333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015152
| 0.153846
| 78
| 2
| 66
| 39
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0.092105
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
6a4c571b5123b5bece755ff8319b53f0e29c4b14
| 397
|
py
|
Python
|
app/blueprints/admin/errors.py
|
Anioko/landingpage_cms
|
b2d68d28287dd163de7d420b2c20b19050a2326a
|
[
"MIT"
] | null | null | null |
app/blueprints/admin/errors.py
|
Anioko/landingpage_cms
|
b2d68d28287dd163de7d420b2c20b19050a2326a
|
[
"MIT"
] | null | null | null |
app/blueprints/admin/errors.py
|
Anioko/landingpage_cms
|
b2d68d28287dd163de7d420b2c20b19050a2326a
|
[
"MIT"
] | null | null | null |
from flask import render_template
from app.blueprints.admin.views import admin
@admin.app_errorhandler(403)
def forbidden(_):
    """Render the custom 403 (forbidden) page, app-wide."""
    return render_template('errors/403.html'), 403
@admin.app_errorhandler(404)
def page_not_found(_):
    """Render the custom 404 (not found) page, app-wide."""
    return render_template('errors/404.html'), 404
@admin.app_errorhandler(500)
def internal_server_error(_):
    """Render the custom 500 (internal server error) page, app-wide."""
    return render_template('errors/500.html'), 500
| 20.894737
| 50
| 0.7733
| 55
| 397
| 5.327273
| 0.436364
| 0.191126
| 0.204778
| 0.266212
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076705
| 0.11335
| 397
| 18
| 51
| 22.055556
| 0.755682
| 0
| 0
| 0
| 0
| 0
| 0.11335
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0
| 0.181818
| 0.272727
| 0.727273
| 0.090909
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
e05b30ff8b882b2e04e8fe2446aed80a172e41ee
| 220
|
py
|
Python
|
createc/__init__.py
|
chenxu2394/py_createc
|
7de578dd9369677cedc492231d5273aa710ea96c
|
[
"MIT"
] | null | null | null |
createc/__init__.py
|
chenxu2394/py_createc
|
7de578dd9369677cedc492231d5273aa710ea96c
|
[
"MIT"
] | null | null | null |
createc/__init__.py
|
chenxu2394/py_createc
|
7de578dd9369677cedc492231d5273aa710ea96c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed May 20 22:24:50 2020
@author: xuc1
"""
__version__ = '0.0.1'
from .Createc_pyCOM import CreatecWin32
from .Createc_pyFile import DAT_IMG
from .Createc_pyFile import VERT_SPEC
| 18.333333
| 39
| 0.727273
| 35
| 220
| 4.314286
| 0.771429
| 0.218543
| 0.225166
| 0.304636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101604
| 0.15
| 220
| 11
| 40
| 20
| 0.705882
| 0.331818
| 0
| 0
| 0
| 0
| 0.035971
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0ecb8b5895d601a34ac4838045d0e2615dedf606
| 427
|
py
|
Python
|
aalpy/learning_algs/__init__.py
|
icezyclon/AALpy
|
3c2f05fdbbcdc99b47ba6b918540239568fca17f
|
[
"MIT"
] | null | null | null |
aalpy/learning_algs/__init__.py
|
icezyclon/AALpy
|
3c2f05fdbbcdc99b47ba6b918540239568fca17f
|
[
"MIT"
] | null | null | null |
aalpy/learning_algs/__init__.py
|
icezyclon/AALpy
|
3c2f05fdbbcdc99b47ba6b918540239568fca17f
|
[
"MIT"
] | null | null | null |
# public API for running automata learning algorithms
from .deterministic.LStar import run_Lstar
from .non_deterministic.OnfsmLstar import run_non_det_Lstar
from .non_deterministic.AbstractedOnfsmLstar import run_abstracted_ONFSM_Lstar
from .stochastic.StochasticLStar import run_stochastic_Lstar
from .stochastic_passive.Alergia import run_Alergia, run_JAlergia
from .stochastic_passive.ActiveAleriga import run_active_Alergia
| 61
| 78
| 0.892272
| 55
| 427
| 6.618182
| 0.454545
| 0.148352
| 0.065934
| 0.137363
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074941
| 427
| 7
| 79
| 61
| 0.921519
| 0.119438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
0ed3aea8f9f90fc3bcc2dae8068cdfd7d5a2585d
| 147
|
py
|
Python
|
cgtools/fastmath/__init__.py
|
tneumann/cgtools
|
8f77b6a4642fe79ac85b8449ebd3f72ea0e56032
|
[
"MIT"
] | 10
|
2019-05-02T14:08:32.000Z
|
2021-03-15T16:07:19.000Z
|
cgtools/fastmath/__init__.py
|
tneumann/cgtools
|
8f77b6a4642fe79ac85b8449ebd3f72ea0e56032
|
[
"MIT"
] | null | null | null |
cgtools/fastmath/__init__.py
|
tneumann/cgtools
|
8f77b6a4642fe79ac85b8449ebd3f72ea0e56032
|
[
"MIT"
] | 3
|
2019-05-02T14:08:33.000Z
|
2021-02-10T03:47:29.000Z
|
from .dot import matmat, matvec
from .inv import inv3, inv2
from .cross import cross3
from .kron import multikron
from .polar_dec import polar_dec
| 24.5
| 32
| 0.802721
| 24
| 147
| 4.833333
| 0.583333
| 0.137931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024
| 0.14966
| 147
| 5
| 33
| 29.4
| 0.904
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0ef0f3a4830852c838985588d6408ba9048c6404
| 176
|
py
|
Python
|
app/admin/__init__.py
|
uosorio/heroku_face
|
7d6465e71dba17a15d8edaef520adb2fcd09d91e
|
[
"Apache-2.0"
] | 73
|
2018-12-14T18:12:33.000Z
|
2022-02-23T21:39:59.000Z
|
app/admin/__init__.py
|
uosorio/heroku_face
|
7d6465e71dba17a15d8edaef520adb2fcd09d91e
|
[
"Apache-2.0"
] | 4
|
2019-11-23T18:08:18.000Z
|
2021-08-23T09:00:15.000Z
|
app/admin/__init__.py
|
uosorio/heroku_face
|
7d6465e71dba17a15d8edaef520adb2fcd09d91e
|
[
"Apache-2.0"
] | 33
|
2019-06-03T00:30:57.000Z
|
2022-03-10T23:54:43.000Z
|
"""
AUTOR: Juanjo
FECHA DE CREACIÓN: 24/05/2019
"""
from flask import Blueprint
admin_bp = Blueprint('admin', __name__, template_folder='templates')
from . import routes
| 12.571429
| 68
| 0.727273
| 23
| 176
| 5.304348
| 0.826087
| 0.229508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053691
| 0.153409
| 176
| 13
| 69
| 13.538462
| 0.765101
| 0.25
| 0
| 0
| 0
| 0
| 0.114754
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 5
|
161945a283c8c737d97a3d56842c6250090cd9c1
| 25
|
py
|
Python
|
widgets/pygal/__init__.py
|
cheak1974/remi-app-template
|
6c724dda0294b41b906134b9062eadce0f6c0617
|
[
"Apache-2.0"
] | 12
|
2020-02-24T08:44:29.000Z
|
2022-02-21T07:18:31.000Z
|
widgets/pygal/__init__.py
|
cheak1974/remi-app-template
|
6c724dda0294b41b906134b9062eadce0f6c0617
|
[
"Apache-2.0"
] | null | null | null |
widgets/pygal/__init__.py
|
cheak1974/remi-app-template
|
6c724dda0294b41b906134b9062eadce0f6c0617
|
[
"Apache-2.0"
] | 3
|
2020-05-02T15:47:09.000Z
|
2021-06-12T23:56:52.000Z
|
from .pygal import Pygal
| 12.5
| 24
| 0.8
| 4
| 25
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 1
| 25
| 25
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
166e60447058bb1027e6cc6809c8f0936259136d
| 472,321
|
py
|
Python
|
storm-client/src/py/storm/ttypes.py
|
hmcl/storm-apache
|
7bc48361531027d078842e3cec00eae6945d22fa
|
[
"Apache-2.0"
] | null | null | null |
storm-client/src/py/storm/ttypes.py
|
hmcl/storm-apache
|
7bc48361531027d078842e3cec00eae6945d22fa
|
[
"Apache-2.0"
] | null | null | null |
storm-client/src/py/storm/ttypes.py
|
hmcl/storm-apache
|
7bc48361531027d078842e3cec00eae6945d22fa
|
[
"Apache-2.0"
] | 1
|
2019-02-07T11:13:29.000Z
|
2019-02-07T11:13:29.000Z
|
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Autogenerated by Thrift Compiler (0.12.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:utf8strings
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
from thrift.transport import TTransport
all_structs = []
class ComponentType(object):
    """Thrift-generated enum: kind of a topology component."""
    BOLT = 1
    SPOUT = 2

    _VALUES_TO_NAMES = {
        BOLT: "BOLT",
        SPOUT: "SPOUT",
    }
    # Reverse lookup derived from the forward map.
    _NAMES_TO_VALUES = {name: code for code, name in _VALUES_TO_NAMES.items()}
class TopologyInitialStatus(object):
    """Thrift-generated enum: status a topology is submitted with."""
    ACTIVE = 1
    INACTIVE = 2

    _VALUES_TO_NAMES = {
        ACTIVE: "ACTIVE",
        INACTIVE: "INACTIVE",
    }
    # Reverse lookup derived from the forward map.
    _NAMES_TO_VALUES = {name: code for code, name in _VALUES_TO_NAMES.items()}
class AccessControlType(object):
    """Thrift-generated enum: subject kind of an access-control entry."""
    OTHER = 1
    USER = 2

    _VALUES_TO_NAMES = {
        OTHER: "OTHER",
        USER: "USER",
    }
    # Reverse lookup derived from the forward map.
    _NAMES_TO_VALUES = {name: code for code, name in _VALUES_TO_NAMES.items()}
class TopologyStatus(object):
    """Thrift-generated enum: lifecycle state of a running topology."""
    ACTIVE = 1
    INACTIVE = 2
    REBALANCING = 3
    KILLED = 4

    _VALUES_TO_NAMES = {
        ACTIVE: "ACTIVE",
        INACTIVE: "INACTIVE",
        REBALANCING: "REBALANCING",
        KILLED: "KILLED",
    }
    # Reverse lookup derived from the forward map.
    _NAMES_TO_VALUES = {name: code for code, name in _VALUES_TO_NAMES.items()}
class NumErrorsChoice(object):
    """Thrift-generated enum: how many errors a query should return."""
    ALL = 0
    NONE = 1
    ONE = 2

    _VALUES_TO_NAMES = {
        ALL: "ALL",
        NONE: "NONE",
        ONE: "ONE",
    }
    # Reverse lookup derived from the forward map.
    _NAMES_TO_VALUES = {name: code for code, name in _VALUES_TO_NAMES.items()}
class ProfileAction(object):
    """Thrift-generated enum: profiling/debugging action on a worker."""
    JPROFILE_STOP = 0
    JPROFILE_START = 1
    JPROFILE_DUMP = 2
    JMAP_DUMP = 3
    JSTACK_DUMP = 4
    JVM_RESTART = 5

    _VALUES_TO_NAMES = {
        JPROFILE_STOP: "JPROFILE_STOP",
        JPROFILE_START: "JPROFILE_START",
        JPROFILE_DUMP: "JPROFILE_DUMP",
        JMAP_DUMP: "JMAP_DUMP",
        JSTACK_DUMP: "JSTACK_DUMP",
        JVM_RESTART: "JVM_RESTART",
    }
    # Reverse lookup derived from the forward map.
    _NAMES_TO_VALUES = {name: code for code, name in _VALUES_TO_NAMES.items()}
class LogLevelAction(object):
    """Thrift-generated enum: action to apply to a logger's level."""
    UNCHANGED = 1
    UPDATE = 2
    REMOVE = 3

    _VALUES_TO_NAMES = {
        UNCHANGED: "UNCHANGED",
        UPDATE: "UPDATE",
        REMOVE: "REMOVE",
    }
    # Reverse lookup derived from the forward map.
    _NAMES_TO_VALUES = {name: code for code, name in _VALUES_TO_NAMES.items()}
class DRPCExceptionType(object):
    """Thrift-generated enum: failure category of a DRPC request."""
    INTERNAL_ERROR = 0
    SERVER_SHUTDOWN = 1
    SERVER_TIMEOUT = 2
    FAILED_REQUEST = 3

    _VALUES_TO_NAMES = {
        INTERNAL_ERROR: "INTERNAL_ERROR",
        SERVER_SHUTDOWN: "SERVER_SHUTDOWN",
        SERVER_TIMEOUT: "SERVER_TIMEOUT",
        FAILED_REQUEST: "FAILED_REQUEST",
    }
    # Reverse lookup derived from the forward map.
    _NAMES_TO_VALUES = {name: code for code, name in _VALUES_TO_NAMES.items()}
class HBServerMessageType(object):
    """Thrift-generated enum: message kinds of the heartbeat-server protocol."""
    # Codes 0..18, assigned in declaration order.
    (CREATE_PATH, CREATE_PATH_RESPONSE, EXISTS, EXISTS_RESPONSE,
     SEND_PULSE, SEND_PULSE_RESPONSE, GET_ALL_PULSE_FOR_PATH,
     GET_ALL_PULSE_FOR_PATH_RESPONSE, GET_ALL_NODES_FOR_PATH,
     GET_ALL_NODES_FOR_PATH_RESPONSE, GET_PULSE, GET_PULSE_RESPONSE,
     DELETE_PATH, DELETE_PATH_RESPONSE, DELETE_PULSE_ID,
     DELETE_PULSE_ID_RESPONSE, CONTROL_MESSAGE, SASL_MESSAGE_TOKEN,
     NOT_AUTHORIZED) = range(19)

    _VALUES_TO_NAMES = {
        CREATE_PATH: "CREATE_PATH",
        CREATE_PATH_RESPONSE: "CREATE_PATH_RESPONSE",
        EXISTS: "EXISTS",
        EXISTS_RESPONSE: "EXISTS_RESPONSE",
        SEND_PULSE: "SEND_PULSE",
        SEND_PULSE_RESPONSE: "SEND_PULSE_RESPONSE",
        GET_ALL_PULSE_FOR_PATH: "GET_ALL_PULSE_FOR_PATH",
        GET_ALL_PULSE_FOR_PATH_RESPONSE: "GET_ALL_PULSE_FOR_PATH_RESPONSE",
        GET_ALL_NODES_FOR_PATH: "GET_ALL_NODES_FOR_PATH",
        GET_ALL_NODES_FOR_PATH_RESPONSE: "GET_ALL_NODES_FOR_PATH_RESPONSE",
        GET_PULSE: "GET_PULSE",
        GET_PULSE_RESPONSE: "GET_PULSE_RESPONSE",
        DELETE_PATH: "DELETE_PATH",
        DELETE_PATH_RESPONSE: "DELETE_PATH_RESPONSE",
        DELETE_PULSE_ID: "DELETE_PULSE_ID",
        DELETE_PULSE_ID_RESPONSE: "DELETE_PULSE_ID_RESPONSE",
        CONTROL_MESSAGE: "CONTROL_MESSAGE",
        SASL_MESSAGE_TOKEN: "SASL_MESSAGE_TOKEN",
        NOT_AUTHORIZED: "NOT_AUTHORIZED",
    }
    # Reverse lookup derived from the forward map.
    _NAMES_TO_VALUES = {name: code for code, name in _VALUES_TO_NAMES.items()}
class WorkerTokenServiceType(object):
    """Thrift-generated enum: service a worker token authenticates to."""
    NIMBUS = 0
    DRPC = 1
    SUPERVISOR = 2

    _VALUES_TO_NAMES = {
        NIMBUS: "NIMBUS",
        DRPC: "DRPC",
        SUPERVISOR: "SUPERVISOR",
    }
    # Reverse lookup derived from the forward map.
    _NAMES_TO_VALUES = {name: code for code, name in _VALUES_TO_NAMES.items()}
class JavaObjectArg(object):
    """
    One typed constructor argument for a JavaObject. The fields mirror the
    supported primitive types; presumably only one of them is set per
    instance — confirm against the Thrift IDL.

    Attributes:
     - int_arg
     - long_arg
     - string_arg
     - bool_arg
     - binary_arg
     - double_arg

    """

    def __init__(self, int_arg=None, long_arg=None, string_arg=None, bool_arg=None, binary_arg=None, double_arg=None,):
        self.int_arg = int_arg
        self.long_arg = long_arg
        self.string_arg = string_arg
        self.bool_arg = bool_arg
        self.binary_arg = binary_arg
        self.double_arg = double_arg

    def read(self, iprot):
        # Fast path: use the C-accelerated decoder when transport and spec permit.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # Slow path: walk the encoded fields, skipping unknown ids/types.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.int_arg = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I64:
                    self.long_arg = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    # Python 2 returns bytes and needs an explicit UTF-8 decode.
                    self.string_arg = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.BOOL:
                    self.bool_arg = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.binary_arg = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.DOUBLE:
                    self.double_arg = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: use the C-accelerated encoder when a spec is available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('JavaObjectArg')
        # Only fields that are set get serialized.
        if self.int_arg is not None:
            oprot.writeFieldBegin('int_arg', TType.I32, 1)
            oprot.writeI32(self.int_arg)
            oprot.writeFieldEnd()
        if self.long_arg is not None:
            oprot.writeFieldBegin('long_arg', TType.I64, 2)
            oprot.writeI64(self.long_arg)
            oprot.writeFieldEnd()
        if self.string_arg is not None:
            oprot.writeFieldBegin('string_arg', TType.STRING, 3)
            # Python 2 strings must be encoded to UTF-8 bytes before writing.
            oprot.writeString(self.string_arg.encode('utf-8') if sys.version_info[0] == 2 else self.string_arg)
            oprot.writeFieldEnd()
        if self.bool_arg is not None:
            oprot.writeFieldBegin('bool_arg', TType.BOOL, 4)
            oprot.writeBool(self.bool_arg)
            oprot.writeFieldEnd()
        if self.binary_arg is not None:
            oprot.writeFieldBegin('binary_arg', TType.STRING, 5)
            oprot.writeBinary(self.binary_arg)
            oprot.writeFieldEnd()
        if self.double_arg is not None:
            oprot.writeFieldBegin('double_arg', TType.DOUBLE, 6)
            oprot.writeDouble(self.double_arg)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class JavaObject(object):
    """
    A Java object described by its fully-qualified class name and the
    constructor arguments to instantiate it with.

    Attributes:
     - full_class_name
     - args_list

    """

    def __init__(self, full_class_name=None, args_list=None,):
        self.full_class_name = full_class_name
        self.args_list = args_list

    def read(self, iprot):
        # Fast path: use the C-accelerated decoder when transport and spec permit.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes and needs an explicit UTF-8 decode.
                    self.full_class_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.LIST:
                    # Decode the list of JavaObjectArg elements.
                    self.args_list = []
                    (_etype3, _size0) = iprot.readListBegin()
                    for _i4 in range(_size0):
                        _elem5 = JavaObjectArg()
                        _elem5.read(iprot)
                        self.args_list.append(_elem5)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: use the C-accelerated encoder when a spec is available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('JavaObject')
        if self.full_class_name is not None:
            oprot.writeFieldBegin('full_class_name', TType.STRING, 1)
            oprot.writeString(self.full_class_name.encode('utf-8') if sys.version_info[0] == 2 else self.full_class_name)
            oprot.writeFieldEnd()
        if self.args_list is not None:
            oprot.writeFieldBegin('args_list', TType.LIST, 2)
            oprot.writeListBegin(TType.STRUCT, len(self.args_list))
            for iter6 in self.args_list:
                iter6.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Both fields are required by the IDL.
        if self.full_class_name is None:
            raise TProtocolException(message='Required field full_class_name is unset!')
        if self.args_list is None:
            raise TProtocolException(message='Required field args_list is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class NullStruct(object):
    """Empty marker struct with no fields (used as a union tag elsewhere
    in this file, e.g. by Grouping)."""

    def read(self, iprot):
        # Fast path: use the C-accelerated decoder when transport and spec permit.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        # No declared fields: skip everything until STOP.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: use the C-accelerated encoder when a spec is available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('NullStruct')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GlobalStreamId(object):
    """
    Globally-unique stream identifier: the producing component plus the
    stream name within that component.

    Attributes:
     - componentId
     - streamId

    """

    def __init__(self, componentId=None, streamId=None,):
        self.componentId = componentId
        self.streamId = streamId

    def read(self, iprot):
        # Fast path: use the C-accelerated decoder when transport and spec permit.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes and needs an explicit UTF-8 decode.
                    self.componentId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.streamId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: use the C-accelerated encoder when a spec is available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GlobalStreamId')
        if self.componentId is not None:
            oprot.writeFieldBegin('componentId', TType.STRING, 1)
            oprot.writeString(self.componentId.encode('utf-8') if sys.version_info[0] == 2 else self.componentId)
            oprot.writeFieldEnd()
        if self.streamId is not None:
            oprot.writeFieldBegin('streamId', TType.STRING, 2)
            oprot.writeString(self.streamId.encode('utf-8') if sys.version_info[0] == 2 else self.streamId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Both fields are required by the IDL.
        if self.componentId is None:
            raise TProtocolException(message='Required field componentId is unset!')
        if self.streamId is None:
            raise TProtocolException(message='Required field streamId is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class Grouping(object):
    """
    Union-style struct selecting a stream-grouping strategy; presumably
    exactly one field is set per instance (NullStruct fields act as pure
    tags) — confirm against the Thrift IDL.

    Attributes:
     - fields
     - shuffle
     - all
     - none
     - direct
     - custom_object
     - custom_serialized
     - local_or_shuffle

    """

    def __init__(self, fields=None, shuffle=None, all=None, none=None, direct=None, custom_object=None, custom_serialized=None, local_or_shuffle=None,):
        self.fields = fields
        self.shuffle = shuffle
        self.all = all
        self.none = none
        self.direct = direct
        self.custom_object = custom_object
        self.custom_serialized = custom_serialized
        self.local_or_shuffle = local_or_shuffle

    def read(self, iprot):
        # Fast path: use the C-accelerated decoder when transport and spec permit.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.LIST:
                    # Decode the list of field-name strings.
                    self.fields = []
                    (_etype10, _size7) = iprot.readListBegin()
                    for _i11 in range(_size7):
                        # Python 2 returns bytes and needs an explicit UTF-8 decode.
                        _elem12 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.fields.append(_elem12)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.shuffle = NullStruct()
                    self.shuffle.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.all = NullStruct()
                    self.all.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.none = NullStruct()
                    self.none.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRUCT:
                    self.direct = NullStruct()
                    self.direct.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.STRUCT:
                    self.custom_object = JavaObject()
                    self.custom_object.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.STRING:
                    self.custom_serialized = iprot.readBinary()
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                if ftype == TType.STRUCT:
                    self.local_or_shuffle = NullStruct()
                    self.local_or_shuffle.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: use the C-accelerated encoder when a spec is available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('Grouping')
        # Only the fields that are set get serialized.
        if self.fields is not None:
            oprot.writeFieldBegin('fields', TType.LIST, 1)
            oprot.writeListBegin(TType.STRING, len(self.fields))
            for iter13 in self.fields:
                oprot.writeString(iter13.encode('utf-8') if sys.version_info[0] == 2 else iter13)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.shuffle is not None:
            oprot.writeFieldBegin('shuffle', TType.STRUCT, 2)
            self.shuffle.write(oprot)
            oprot.writeFieldEnd()
        if self.all is not None:
            oprot.writeFieldBegin('all', TType.STRUCT, 3)
            self.all.write(oprot)
            oprot.writeFieldEnd()
        if self.none is not None:
            oprot.writeFieldBegin('none', TType.STRUCT, 4)
            self.none.write(oprot)
            oprot.writeFieldEnd()
        if self.direct is not None:
            oprot.writeFieldBegin('direct', TType.STRUCT, 5)
            self.direct.write(oprot)
            oprot.writeFieldEnd()
        if self.custom_object is not None:
            oprot.writeFieldBegin('custom_object', TType.STRUCT, 6)
            self.custom_object.write(oprot)
            oprot.writeFieldEnd()
        if self.custom_serialized is not None:
            oprot.writeFieldBegin('custom_serialized', TType.STRING, 7)
            oprot.writeBinary(self.custom_serialized)
            oprot.writeFieldEnd()
        if self.local_or_shuffle is not None:
            oprot.writeFieldBegin('local_or_shuffle', TType.STRUCT, 8)
            self.local_or_shuffle.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class StreamInfo(object):
    """
    Declaration of one output stream: its tuple field names and whether
    the stream is a direct stream.

    Attributes:
     - output_fields
     - direct

    """

    def __init__(self, output_fields=None, direct=None,):
        self.output_fields = output_fields
        self.direct = direct

    def read(self, iprot):
        # Fast path: use the C-accelerated decoder when transport and spec permit.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.LIST:
                    # Decode the list of output field-name strings.
                    self.output_fields = []
                    (_etype17, _size14) = iprot.readListBegin()
                    for _i18 in range(_size14):
                        # Python 2 returns bytes and needs an explicit UTF-8 decode.
                        _elem19 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.output_fields.append(_elem19)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.BOOL:
                    self.direct = iprot.readBool()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: use the C-accelerated encoder when a spec is available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('StreamInfo')
        if self.output_fields is not None:
            oprot.writeFieldBegin('output_fields', TType.LIST, 1)
            oprot.writeListBegin(TType.STRING, len(self.output_fields))
            for iter20 in self.output_fields:
                oprot.writeString(iter20.encode('utf-8') if sys.version_info[0] == 2 else iter20)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.direct is not None:
            oprot.writeFieldBegin('direct', TType.BOOL, 2)
            oprot.writeBool(self.direct)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Both fields are required by the IDL.
        if self.output_fields is None:
            raise TProtocolException(message='Required field output_fields is unset!')
        if self.direct is None:
            raise TProtocolException(message='Required field direct is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ShellComponent(object):
    """
    A component implemented outside the JVM: the command used to execute
    it and the script it runs.

    Attributes:
     - execution_command
     - script

    """

    def __init__(self, execution_command=None, script=None,):
        self.execution_command = execution_command
        self.script = script

    def read(self, iprot):
        # Fast path: use the C-accelerated decoder when transport and spec permit.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes and needs an explicit UTF-8 decode.
                    self.execution_command = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.script = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: use the C-accelerated encoder when a spec is available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ShellComponent')
        if self.execution_command is not None:
            oprot.writeFieldBegin('execution_command', TType.STRING, 1)
            oprot.writeString(self.execution_command.encode('utf-8') if sys.version_info[0] == 2 else self.execution_command)
            oprot.writeFieldEnd()
        if self.script is not None:
            oprot.writeFieldBegin('script', TType.STRING, 2)
            oprot.writeString(self.script.encode('utf-8') if sys.version_info[0] == 2 else self.script)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ComponentObject(object):
    """Thrift struct carrying one of several component representations.

    Attributes:
     - serialized_java
     - shell
     - java_object
    """

    def __init__(self, serialized_java=None, shell=None, java_object=None,):
        self.serialized_java = serialized_java
        self.shell = shell
        self.java_object = java_object

    def read(self, iprot):
        """Populate this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                # Field 1 is binary on the wire (Thrift maps binary to STRING).
                self.serialized_java = iprot.readBinary()
            elif fid == 2 and ftype == TType.STRUCT:
                self.shell = ShellComponent()
                self.shell.read(iprot)
            elif fid == 3 and ftype == TType.STRUCT:
                self.java_object = JavaObject()
                self.java_object.read(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ComponentObject')
        if self.serialized_java is not None:
            oprot.writeFieldBegin('serialized_java', TType.STRING, 1)
            oprot.writeBinary(self.serialized_java)
            oprot.writeFieldEnd()
        if self.shell is not None:
            oprot.writeFieldBegin('shell', TType.STRUCT, 2)
            self.shell.write(oprot)
            oprot.writeFieldEnd()
        if self.java_object is not None:
            oprot.writeFieldBegin('java_object', TType.STRUCT, 3)
            self.java_object.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields; nothing to check."""
        return

    def __repr__(self):
        attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ComponentCommon(object):
    """Thrift struct with the configuration shared by all component kinds.

    Attributes:
     - inputs
     - streams
     - parallelism_hint
     - json_conf
    """

    def __init__(self, inputs=None, streams=None, parallelism_hint=None, json_conf=None,):
        self.inputs = inputs
        self.streams = streams
        self.parallelism_hint = parallelism_hint
        self.json_conf = json_conf

    def read(self, iprot):
        """Populate this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.MAP:
                # map<GlobalStreamId, Grouping>
                self.inputs = {}
                (ktype, vtype, size) = iprot.readMapBegin()
                for _ in range(size):
                    stream_id = GlobalStreamId()
                    stream_id.read(iprot)
                    grouping = Grouping()
                    grouping.read(iprot)
                    self.inputs[stream_id] = grouping
                iprot.readMapEnd()
            elif fid == 2 and ftype == TType.MAP:
                # map<string, StreamInfo>
                self.streams = {}
                (ktype, vtype, size) = iprot.readMapBegin()
                for _ in range(size):
                    name = iprot.readString()
                    if sys.version_info[0] == 2:
                        name = name.decode('utf-8')
                    info = StreamInfo()
                    info.read(iprot)
                    self.streams[name] = info
                iprot.readMapEnd()
            elif fid == 3 and ftype == TType.I32:
                self.parallelism_hint = iprot.readI32()
            elif fid == 4 and ftype == TType.STRING:
                conf = iprot.readString()
                if sys.version_info[0] == 2:
                    conf = conf.decode('utf-8')
                self.json_conf = conf
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ComponentCommon')
        if self.inputs is not None:
            oprot.writeFieldBegin('inputs', TType.MAP, 1)
            oprot.writeMapBegin(TType.STRUCT, TType.STRUCT, len(self.inputs))
            for stream_id, grouping in self.inputs.items():
                stream_id.write(oprot)
                grouping.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.streams is not None:
            oprot.writeFieldBegin('streams', TType.MAP, 2)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.streams))
            for name, info in self.streams.items():
                oprot.writeString(name.encode('utf-8') if sys.version_info[0] == 2 else name)
                info.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.parallelism_hint is not None:
            oprot.writeFieldBegin('parallelism_hint', TType.I32, 3)
            oprot.writeI32(self.parallelism_hint)
            oprot.writeFieldEnd()
        if self.json_conf is not None:
            oprot.writeFieldBegin('json_conf', TType.STRING, 4)
            oprot.writeString(self.json_conf.encode('utf-8') if sys.version_info[0] == 2 else self.json_conf)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.inputs is None:
            raise TProtocolException(message='Required field inputs is unset!')
        if self.streams is None:
            raise TProtocolException(message='Required field streams is unset!')
        return

    def __repr__(self):
        attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class SpoutSpec(object):
    """Thrift struct pairing a spout's component object with its common config.

    Attributes:
     - spout_object
     - common
    """

    def __init__(self, spout_object=None, common=None,):
        self.spout_object = spout_object
        self.common = common

    def read(self, iprot):
        """Populate this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.spout_object = ComponentObject()
                self.spout_object.read(iprot)
            elif fid == 2 and ftype == TType.STRUCT:
                self.common = ComponentCommon()
                self.common.read(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SpoutSpec')
        if self.spout_object is not None:
            oprot.writeFieldBegin('spout_object', TType.STRUCT, 1)
            self.spout_object.write(oprot)
            oprot.writeFieldEnd()
        if self.common is not None:
            oprot.writeFieldBegin('common', TType.STRUCT, 2)
            self.common.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.spout_object is None:
            raise TProtocolException(message='Required field spout_object is unset!')
        if self.common is None:
            raise TProtocolException(message='Required field common is unset!')
        return

    def __repr__(self):
        attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class Bolt(object):
    """Thrift struct pairing a bolt's component object with its common config.

    Attributes:
     - bolt_object
     - common
    """

    def __init__(self, bolt_object=None, common=None,):
        self.bolt_object = bolt_object
        self.common = common

    def read(self, iprot):
        """Populate this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.bolt_object = ComponentObject()
                self.bolt_object.read(iprot)
            elif fid == 2 and ftype == TType.STRUCT:
                self.common = ComponentCommon()
                self.common.read(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('Bolt')
        if self.bolt_object is not None:
            oprot.writeFieldBegin('bolt_object', TType.STRUCT, 1)
            self.bolt_object.write(oprot)
            oprot.writeFieldEnd()
        if self.common is not None:
            oprot.writeFieldBegin('common', TType.STRUCT, 2)
            self.common.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.bolt_object is None:
            raise TProtocolException(message='Required field bolt_object is unset!')
        if self.common is None:
            raise TProtocolException(message='Required field common is unset!')
        return

    def __repr__(self):
        attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class StateSpoutSpec(object):
    """Thrift struct pairing a state spout's component object with its config.

    Attributes:
     - state_spout_object
     - common
    """

    def __init__(self, state_spout_object=None, common=None,):
        self.state_spout_object = state_spout_object
        self.common = common

    def read(self, iprot):
        """Populate this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.state_spout_object = ComponentObject()
                self.state_spout_object.read(iprot)
            elif fid == 2 and ftype == TType.STRUCT:
                self.common = ComponentCommon()
                self.common.read(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('StateSpoutSpec')
        if self.state_spout_object is not None:
            oprot.writeFieldBegin('state_spout_object', TType.STRUCT, 1)
            self.state_spout_object.write(oprot)
            oprot.writeFieldEnd()
        if self.common is not None:
            oprot.writeFieldBegin('common', TType.STRUCT, 2)
            self.common.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.state_spout_object is None:
            raise TProtocolException(message='Required field state_spout_object is unset!')
        if self.common is None:
            raise TProtocolException(message='Required field common is unset!')
        return

    def __repr__(self):
        attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class SharedMemory(object):
    """Thrift struct describing a named shared-memory request.

    Attributes:
     - name
     - on_heap
     - off_heap_worker
     - off_heap_node
    """

    def __init__(self, name=None, on_heap=None, off_heap_worker=None, off_heap_node=None,):
        self.name = name
        self.on_heap = on_heap
        self.off_heap_worker = off_heap_worker
        self.off_heap_node = off_heap_node

    def read(self, iprot):
        """Populate this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                text = iprot.readString()
                if sys.version_info[0] == 2:
                    text = text.decode('utf-8')
                self.name = text
            elif fid == 2 and ftype == TType.DOUBLE:
                self.on_heap = iprot.readDouble()
            elif fid == 3 and ftype == TType.DOUBLE:
                self.off_heap_worker = iprot.readDouble()
            elif fid == 4 and ftype == TType.DOUBLE:
                self.off_heap_node = iprot.readDouble()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SharedMemory')
        if self.name is not None:
            oprot.writeFieldBegin('name', TType.STRING, 1)
            oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
            oprot.writeFieldEnd()
        if self.on_heap is not None:
            oprot.writeFieldBegin('on_heap', TType.DOUBLE, 2)
            oprot.writeDouble(self.on_heap)
            oprot.writeFieldEnd()
        if self.off_heap_worker is not None:
            oprot.writeFieldBegin('off_heap_worker', TType.DOUBLE, 3)
            oprot.writeDouble(self.off_heap_worker)
            oprot.writeFieldEnd()
        if self.off_heap_node is not None:
            oprot.writeFieldBegin('off_heap_node', TType.DOUBLE, 4)
            oprot.writeDouble(self.off_heap_node)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required name field is unset."""
        if self.name is None:
            raise TProtocolException(message='Required field name is unset!')
        return

    def __repr__(self):
        attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class StormTopology(object):
    """Thrift struct for a complete topology definition.

    Attributes:
     - spouts
     - bolts
     - state_spouts
     - worker_hooks
     - dependency_jars
     - dependency_artifacts
     - storm_version
     - jdk_version
     - component_to_shared_memory
     - shared_memory
    """

    def __init__(self, spouts=None, bolts=None, state_spouts=None, worker_hooks=None, dependency_jars=None, dependency_artifacts=None, storm_version=None, jdk_version=None, component_to_shared_memory=None, shared_memory=None,):
        self.spouts = spouts
        self.bolts = bolts
        self.state_spouts = state_spouts
        self.worker_hooks = worker_hooks
        self.dependency_jars = dependency_jars
        self.dependency_artifacts = dependency_artifacts
        self.storm_version = storm_version
        self.jdk_version = jdk_version
        self.component_to_shared_memory = component_to_shared_memory
        self.shared_memory = shared_memory

    @staticmethod
    def _decode_str(raw):
        # Thrift returns bytes on Python 2; normalise to text there.
        return raw.decode('utf-8') if sys.version_info[0] == 2 else raw

    @staticmethod
    def _encode_str(text):
        # Mirror of _decode_str for the write path.
        return text.encode('utf-8') if sys.version_info[0] == 2 else text

    def read(self, iprot):
        """Populate this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        decode = self._decode_str
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.MAP:
                self.spouts = {}
                (ktype, vtype, size) = iprot.readMapBegin()
                for _ in range(size):
                    key = decode(iprot.readString())
                    spec = SpoutSpec()
                    spec.read(iprot)
                    self.spouts[key] = spec
                iprot.readMapEnd()
            elif fid == 2 and ftype == TType.MAP:
                self.bolts = {}
                (ktype, vtype, size) = iprot.readMapBegin()
                for _ in range(size):
                    key = decode(iprot.readString())
                    bolt = Bolt()
                    bolt.read(iprot)
                    self.bolts[key] = bolt
                iprot.readMapEnd()
            elif fid == 3 and ftype == TType.MAP:
                self.state_spouts = {}
                (ktype, vtype, size) = iprot.readMapBegin()
                for _ in range(size):
                    key = decode(iprot.readString())
                    spec = StateSpoutSpec()
                    spec.read(iprot)
                    self.state_spouts[key] = spec
                iprot.readMapEnd()
            elif fid == 4 and ftype == TType.LIST:
                # list of binary blobs (Thrift maps binary to STRING on the wire)
                self.worker_hooks = []
                (etype, size) = iprot.readListBegin()
                for _ in range(size):
                    self.worker_hooks.append(iprot.readBinary())
                iprot.readListEnd()
            elif fid == 5 and ftype == TType.LIST:
                self.dependency_jars = []
                (etype, size) = iprot.readListBegin()
                for _ in range(size):
                    self.dependency_jars.append(decode(iprot.readString()))
                iprot.readListEnd()
            elif fid == 6 and ftype == TType.LIST:
                self.dependency_artifacts = []
                (etype, size) = iprot.readListBegin()
                for _ in range(size):
                    self.dependency_artifacts.append(decode(iprot.readString()))
                iprot.readListEnd()
            elif fid == 7 and ftype == TType.STRING:
                self.storm_version = decode(iprot.readString())
            elif fid == 8 and ftype == TType.STRING:
                self.jdk_version = decode(iprot.readString())
            elif fid == 9 and ftype == TType.MAP:
                # map<string, set<string>>
                self.component_to_shared_memory = {}
                (ktype, vtype, size) = iprot.readMapBegin()
                for _ in range(size):
                    key = decode(iprot.readString())
                    members = set()
                    (setype, ssize) = iprot.readSetBegin()
                    for _ in range(ssize):
                        members.add(decode(iprot.readString()))
                    iprot.readSetEnd()
                    self.component_to_shared_memory[key] = members
                iprot.readMapEnd()
            elif fid == 10 and ftype == TType.MAP:
                self.shared_memory = {}
                (ktype, vtype, size) = iprot.readMapBegin()
                for _ in range(size):
                    key = decode(iprot.readString())
                    mem = SharedMemory()
                    mem.read(iprot)
                    self.shared_memory[key] = mem
                iprot.readMapEnd()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        encode = self._encode_str
        oprot.writeStructBegin('StormTopology')
        if self.spouts is not None:
            oprot.writeFieldBegin('spouts', TType.MAP, 1)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.spouts))
            for key, spec in self.spouts.items():
                oprot.writeString(encode(key))
                spec.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.bolts is not None:
            oprot.writeFieldBegin('bolts', TType.MAP, 2)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.bolts))
            for key, bolt in self.bolts.items():
                oprot.writeString(encode(key))
                bolt.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.state_spouts is not None:
            oprot.writeFieldBegin('state_spouts', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.state_spouts))
            for key, spec in self.state_spouts.items():
                oprot.writeString(encode(key))
                spec.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.worker_hooks is not None:
            oprot.writeFieldBegin('worker_hooks', TType.LIST, 4)
            oprot.writeListBegin(TType.STRING, len(self.worker_hooks))
            for blob in self.worker_hooks:
                oprot.writeBinary(blob)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.dependency_jars is not None:
            oprot.writeFieldBegin('dependency_jars', TType.LIST, 5)
            oprot.writeListBegin(TType.STRING, len(self.dependency_jars))
            for jar in self.dependency_jars:
                oprot.writeString(encode(jar))
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.dependency_artifacts is not None:
            oprot.writeFieldBegin('dependency_artifacts', TType.LIST, 6)
            oprot.writeListBegin(TType.STRING, len(self.dependency_artifacts))
            for artifact in self.dependency_artifacts:
                oprot.writeString(encode(artifact))
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.storm_version is not None:
            oprot.writeFieldBegin('storm_version', TType.STRING, 7)
            oprot.writeString(encode(self.storm_version))
            oprot.writeFieldEnd()
        if self.jdk_version is not None:
            oprot.writeFieldBegin('jdk_version', TType.STRING, 8)
            oprot.writeString(encode(self.jdk_version))
            oprot.writeFieldEnd()
        if self.component_to_shared_memory is not None:
            oprot.writeFieldBegin('component_to_shared_memory', TType.MAP, 9)
            oprot.writeMapBegin(TType.STRING, TType.SET, len(self.component_to_shared_memory))
            for key, members in self.component_to_shared_memory.items():
                oprot.writeString(encode(key))
                oprot.writeSetBegin(TType.STRING, len(members))
                for member in members:
                    oprot.writeString(encode(member))
                oprot.writeSetEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.shared_memory is not None:
            oprot.writeFieldBegin('shared_memory', TType.MAP, 10)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.shared_memory))
            for key, mem in self.shared_memory.items():
                oprot.writeString(encode(key))
                mem.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.spouts is None:
            raise TProtocolException(message='Required field spouts is unset!')
        if self.bolts is None:
            raise TProtocolException(message='Required field bolts is unset!')
        if self.state_spouts is None:
            raise TProtocolException(message='Required field state_spouts is unset!')
        return

    def __repr__(self):
        attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class AlreadyAliveException(TException):
    """Thrift-declared exception carrying a required message string.

    Attributes:
     - msg
    """

    def __init__(self, msg=None,):
        self.msg = msg

    def read(self, iprot):
        """Populate this exception from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                text = iprot.readString()
                if sys.version_info[0] == 2:
                    text = text.decode('utf-8')
                self.msg = text
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this exception to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AlreadyAliveException')
        if self.msg is not None:
            oprot.writeFieldBegin('msg', TType.STRING, 1)
            oprot.writeString(self.msg.encode('utf-8') if sys.version_info[0] == 2 else self.msg)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required msg field is unset."""
        if self.msg is None:
            raise TProtocolException(message='Required field msg is unset!')
        return

    def __str__(self):
        return repr(self)

    def __repr__(self):
        attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class NotAliveException(TException):
    """Thrift-declared exception carrying a required message string.

    Attributes:
     - msg
    """

    def __init__(self, msg=None,):
        self.msg = msg

    def read(self, iprot):
        """Populate this exception from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                text = iprot.readString()
                if sys.version_info[0] == 2:
                    text = text.decode('utf-8')
                self.msg = text
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this exception to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('NotAliveException')
        if self.msg is not None:
            oprot.writeFieldBegin('msg', TType.STRING, 1)
            oprot.writeString(self.msg.encode('utf-8') if sys.version_info[0] == 2 else self.msg)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required msg field is unset."""
        if self.msg is None:
            raise TProtocolException(message='Required field msg is unset!')
        return

    def __str__(self):
        return repr(self)

    def __repr__(self):
        attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class AuthorizationException(TException):
    """Thrift-declared exception carrying a required message string.

    Attributes:
     - msg
    """

    def __init__(self, msg=None,):
        self.msg = msg

    def read(self, iprot):
        """Populate this exception from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                text = iprot.readString()
                if sys.version_info[0] == 2:
                    text = text.decode('utf-8')
                self.msg = text
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this exception to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AuthorizationException')
        if self.msg is not None:
            oprot.writeFieldBegin('msg', TType.STRING, 1)
            oprot.writeString(self.msg.encode('utf-8') if sys.version_info[0] == 2 else self.msg)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required msg field is unset."""
        if self.msg is None:
            raise TProtocolException(message='Required field msg is unset!')
        return

    def __str__(self):
        return repr(self)

    def __repr__(self):
        attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class InvalidTopologyException(TException):
    """Thrift-declared exception carrying a required message string.

    Attributes:
     - msg
    """

    def __init__(self, msg=None,):
        self.msg = msg

    def read(self, iprot):
        """Populate this exception from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                text = iprot.readString()
                if sys.version_info[0] == 2:
                    text = text.decode('utf-8')
                self.msg = text
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this exception to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('InvalidTopologyException')
        if self.msg is not None:
            oprot.writeFieldBegin('msg', TType.STRING, 1)
            oprot.writeString(self.msg.encode('utf-8') if sys.version_info[0] == 2 else self.msg)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required msg field is unset."""
        if self.msg is None:
            raise TProtocolException(message='Required field msg is unset!')
        return

    def __str__(self):
        return repr(self)

    def __repr__(self):
        attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class KeyNotFoundException(TException):
    """Thrift-declared exception carrying a required message string.

    Attributes:
     - msg
    """

    def __init__(self, msg=None,):
        self.msg = msg

    def read(self, iprot):
        """Populate this exception from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                text = iprot.readString()
                if sys.version_info[0] == 2:
                    text = text.decode('utf-8')
                self.msg = text
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this exception to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('KeyNotFoundException')
        if self.msg is not None:
            oprot.writeFieldBegin('msg', TType.STRING, 1)
            oprot.writeString(self.msg.encode('utf-8') if sys.version_info[0] == 2 else self.msg)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required msg field is unset."""
        if self.msg is None:
            raise TProtocolException(message='Required field msg is unset!')
        return

    def __str__(self):
        return repr(self)

    def __repr__(self):
        attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class IllegalStateException(TException):
    """Thrift-declared exception carrying a required message string.

    Attributes:
     - msg
    """

    def __init__(self, msg=None,):
        self.msg = msg

    def read(self, iprot):
        """Populate this exception from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                text = iprot.readString()
                if sys.version_info[0] == 2:
                    text = text.decode('utf-8')
                self.msg = text
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this exception to the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('IllegalStateException')
        if self.msg is not None:
            oprot.writeFieldBegin('msg', TType.STRING, 1)
            oprot.writeString(self.msg.encode('utf-8') if sys.version_info[0] == 2 else self.msg)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required msg field is unset."""
        if self.msg is None:
            raise TProtocolException(message='Required field msg is unset!')
        return

    def __str__(self):
        return repr(self)

    def __repr__(self):
        attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class KeyAlreadyExistsException(TException):
    """
    Attributes:
     - msg
    """

    def __init__(self, msg=None,):
        self.msg = msg

    def read(self, iprot):
        # Prefer the compiled decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                text = iprot.readString()
                self.msg = text.decode('utf-8') if sys.version_info[0] == 2 else text
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Prefer the compiled encoder when a thrift_spec is present.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('KeyAlreadyExistsException')
        if self.msg is not None:
            oprot.writeFieldBegin('msg', TType.STRING, 1)
            out = self.msg.encode('utf-8') if sys.version_info[0] == 2 else self.msg
            oprot.writeString(out)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.msg is None:
            raise TProtocolException(message='Required field msg is unset!')
        return

    def __str__(self):
        return repr(self)

    def __repr__(self):
        parts = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, parts)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class TopologySummary(object):
    """Thrift struct summarizing one topology for cluster-overview queries.

    Fields 1-9 are core stats; fields 513-515 and 521-526 use deliberately
    high field ids (presumably to leave room for future core fields — the
    IDL is not visible here).

    Attributes:
     - id
     - name
     - num_tasks
     - num_executors
     - num_workers
     - uptime_secs
     - status
     - storm_version
     - topology_version
     - sched_status
     - owner
     - replication_count
     - requested_memonheap
     - requested_memoffheap
     - requested_cpu
     - assigned_memonheap
     - assigned_memoffheap
     - assigned_cpu
    """

    def __init__(self, id=None, name=None, num_tasks=None, num_executors=None, num_workers=None, uptime_secs=None, status=None, storm_version=None, topology_version=None, sched_status=None, owner=None, replication_count=None, requested_memonheap=None, requested_memoffheap=None, requested_cpu=None, assigned_memonheap=None, assigned_memoffheap=None, assigned_cpu=None,):
        self.id = id
        self.name = name
        self.num_tasks = num_tasks
        self.num_executors = num_executors
        self.num_workers = num_workers
        self.uptime_secs = uptime_secs
        self.status = status
        self.storm_version = storm_version
        self.topology_version = topology_version
        self.sched_status = sched_status
        self.owner = owner
        self.replication_count = replication_count
        self.requested_memonheap = requested_memonheap
        self.requested_memoffheap = requested_memoffheap
        self.requested_cpu = requested_cpu
        self.assigned_memonheap = assigned_memonheap
        self.assigned_memoffheap = assigned_memoffheap
        self.assigned_cpu = assigned_cpu

    def read(self, iprot):
        """Deserialize this struct from *iprot* (a Thrift protocol object)."""
        # Accelerated C decode path, used when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # Dispatch on field id; a type mismatch or unknown id is skipped
            # so that newer/older peers remain wire-compatible.
            if fid == 1:
                if ftype == TType.STRING:
                    self.id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.num_tasks = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.num_executors = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.I32:
                    self.num_workers = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.I32:
                    self.uptime_secs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.STRING:
                    self.status = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                if ftype == TType.STRING:
                    self.storm_version = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 9:
                if ftype == TType.STRING:
                    self.topology_version = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 513:
                if ftype == TType.STRING:
                    self.sched_status = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 514:
                if ftype == TType.STRING:
                    self.owner = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 515:
                if ftype == TType.I32:
                    self.replication_count = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 521:
                if ftype == TType.DOUBLE:
                    self.requested_memonheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 522:
                if ftype == TType.DOUBLE:
                    self.requested_memoffheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 523:
                if ftype == TType.DOUBLE:
                    self.requested_cpu = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 524:
                if ftype == TType.DOUBLE:
                    self.assigned_memonheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 525:
                if ftype == TType.DOUBLE:
                    self.assigned_memoffheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 526:
                if ftype == TType.DOUBLE:
                    self.assigned_cpu = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        # Accelerated C encode path.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('TopologySummary')
        if self.id is not None:
            oprot.writeFieldBegin('id', TType.STRING, 1)
            oprot.writeString(self.id.encode('utf-8') if sys.version_info[0] == 2 else self.id)
            oprot.writeFieldEnd()
        if self.name is not None:
            oprot.writeFieldBegin('name', TType.STRING, 2)
            oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
            oprot.writeFieldEnd()
        if self.num_tasks is not None:
            oprot.writeFieldBegin('num_tasks', TType.I32, 3)
            oprot.writeI32(self.num_tasks)
            oprot.writeFieldEnd()
        if self.num_executors is not None:
            oprot.writeFieldBegin('num_executors', TType.I32, 4)
            oprot.writeI32(self.num_executors)
            oprot.writeFieldEnd()
        if self.num_workers is not None:
            oprot.writeFieldBegin('num_workers', TType.I32, 5)
            oprot.writeI32(self.num_workers)
            oprot.writeFieldEnd()
        if self.uptime_secs is not None:
            oprot.writeFieldBegin('uptime_secs', TType.I32, 6)
            oprot.writeI32(self.uptime_secs)
            oprot.writeFieldEnd()
        if self.status is not None:
            oprot.writeFieldBegin('status', TType.STRING, 7)
            oprot.writeString(self.status.encode('utf-8') if sys.version_info[0] == 2 else self.status)
            oprot.writeFieldEnd()
        if self.storm_version is not None:
            oprot.writeFieldBegin('storm_version', TType.STRING, 8)
            oprot.writeString(self.storm_version.encode('utf-8') if sys.version_info[0] == 2 else self.storm_version)
            oprot.writeFieldEnd()
        if self.topology_version is not None:
            oprot.writeFieldBegin('topology_version', TType.STRING, 9)
            oprot.writeString(self.topology_version.encode('utf-8') if sys.version_info[0] == 2 else self.topology_version)
            oprot.writeFieldEnd()
        if self.sched_status is not None:
            oprot.writeFieldBegin('sched_status', TType.STRING, 513)
            oprot.writeString(self.sched_status.encode('utf-8') if sys.version_info[0] == 2 else self.sched_status)
            oprot.writeFieldEnd()
        if self.owner is not None:
            oprot.writeFieldBegin('owner', TType.STRING, 514)
            oprot.writeString(self.owner.encode('utf-8') if sys.version_info[0] == 2 else self.owner)
            oprot.writeFieldEnd()
        if self.replication_count is not None:
            oprot.writeFieldBegin('replication_count', TType.I32, 515)
            oprot.writeI32(self.replication_count)
            oprot.writeFieldEnd()
        if self.requested_memonheap is not None:
            oprot.writeFieldBegin('requested_memonheap', TType.DOUBLE, 521)
            oprot.writeDouble(self.requested_memonheap)
            oprot.writeFieldEnd()
        if self.requested_memoffheap is not None:
            oprot.writeFieldBegin('requested_memoffheap', TType.DOUBLE, 522)
            oprot.writeDouble(self.requested_memoffheap)
            oprot.writeFieldEnd()
        if self.requested_cpu is not None:
            oprot.writeFieldBegin('requested_cpu', TType.DOUBLE, 523)
            oprot.writeDouble(self.requested_cpu)
            oprot.writeFieldEnd()
        if self.assigned_memonheap is not None:
            oprot.writeFieldBegin('assigned_memonheap', TType.DOUBLE, 524)
            oprot.writeDouble(self.assigned_memonheap)
            oprot.writeFieldEnd()
        if self.assigned_memoffheap is not None:
            oprot.writeFieldBegin('assigned_memoffheap', TType.DOUBLE, 525)
            oprot.writeDouble(self.assigned_memoffheap)
            oprot.writeFieldEnd()
        if self.assigned_cpu is not None:
            oprot.writeFieldBegin('assigned_cpu', TType.DOUBLE, 526)
            oprot.writeDouble(self.assigned_cpu)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is still None."""
        if self.id is None:
            raise TProtocolException(message='Required field id is unset!')
        if self.name is None:
            raise TProtocolException(message='Required field name is unset!')
        if self.num_tasks is None:
            raise TProtocolException(message='Required field num_tasks is unset!')
        if self.num_executors is None:
            raise TProtocolException(message='Required field num_executors is unset!')
        if self.num_workers is None:
            raise TProtocolException(message='Required field num_workers is unset!')
        if self.uptime_secs is None:
            raise TProtocolException(message='Required field uptime_secs is unset!')
        if self.status is None:
            raise TProtocolException(message='Required field status is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class SupervisorSummary(object):
    """Thrift struct describing one supervisor node.

    Note the non-None constructor default for ``version``
    ("VERSION_NOT_PROVIDED"), so ``version`` is always written unless the
    caller explicitly sets it to None.

    Attributes:
     - host
     - uptime_secs
     - num_workers
     - num_used_workers
     - supervisor_id
     - version
     - total_resources
     - used_mem
     - used_cpu
     - fragmented_mem
     - fragmented_cpu
     - blacklisted
    """

    def __init__(self, host=None, uptime_secs=None, num_workers=None, num_used_workers=None, supervisor_id=None, version="VERSION_NOT_PROVIDED", total_resources=None, used_mem=None, used_cpu=None, fragmented_mem=None, fragmented_cpu=None, blacklisted=None,):
        self.host = host
        self.uptime_secs = uptime_secs
        self.num_workers = num_workers
        self.num_used_workers = num_used_workers
        self.supervisor_id = supervisor_id
        self.version = version
        self.total_resources = total_resources
        self.used_mem = used_mem
        self.used_cpu = used_cpu
        self.fragmented_mem = fragmented_mem
        self.fragmented_cpu = fragmented_cpu
        self.blacklisted = blacklisted

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Accelerated C decode path.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.host = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.uptime_secs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.num_workers = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.num_used_workers = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.supervisor_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.STRING:
                    self.version = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                # map<string, double> of resources offered by this supervisor.
                if ftype == TType.MAP:
                    self.total_resources = {}
                    (_ktype113, _vtype114, _size112) = iprot.readMapBegin()
                    for _i116 in range(_size112):
                        _key117 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val118 = iprot.readDouble()
                        self.total_resources[_key117] = _val118
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                if ftype == TType.DOUBLE:
                    self.used_mem = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 9:
                if ftype == TType.DOUBLE:
                    self.used_cpu = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 10:
                if ftype == TType.DOUBLE:
                    self.fragmented_mem = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 11:
                if ftype == TType.DOUBLE:
                    self.fragmented_cpu = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 12:
                if ftype == TType.BOOL:
                    self.blacklisted = iprot.readBool()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; None fields are omitted."""
        # Accelerated C encode path.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SupervisorSummary')
        if self.host is not None:
            oprot.writeFieldBegin('host', TType.STRING, 1)
            oprot.writeString(self.host.encode('utf-8') if sys.version_info[0] == 2 else self.host)
            oprot.writeFieldEnd()
        if self.uptime_secs is not None:
            oprot.writeFieldBegin('uptime_secs', TType.I32, 2)
            oprot.writeI32(self.uptime_secs)
            oprot.writeFieldEnd()
        if self.num_workers is not None:
            oprot.writeFieldBegin('num_workers', TType.I32, 3)
            oprot.writeI32(self.num_workers)
            oprot.writeFieldEnd()
        if self.num_used_workers is not None:
            oprot.writeFieldBegin('num_used_workers', TType.I32, 4)
            oprot.writeI32(self.num_used_workers)
            oprot.writeFieldEnd()
        if self.supervisor_id is not None:
            oprot.writeFieldBegin('supervisor_id', TType.STRING, 5)
            oprot.writeString(self.supervisor_id.encode('utf-8') if sys.version_info[0] == 2 else self.supervisor_id)
            oprot.writeFieldEnd()
        if self.version is not None:
            oprot.writeFieldBegin('version', TType.STRING, 6)
            oprot.writeString(self.version.encode('utf-8') if sys.version_info[0] == 2 else self.version)
            oprot.writeFieldEnd()
        if self.total_resources is not None:
            oprot.writeFieldBegin('total_resources', TType.MAP, 7)
            oprot.writeMapBegin(TType.STRING, TType.DOUBLE, len(self.total_resources))
            for kiter119, viter120 in self.total_resources.items():
                oprot.writeString(kiter119.encode('utf-8') if sys.version_info[0] == 2 else kiter119)
                oprot.writeDouble(viter120)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.used_mem is not None:
            oprot.writeFieldBegin('used_mem', TType.DOUBLE, 8)
            oprot.writeDouble(self.used_mem)
            oprot.writeFieldEnd()
        if self.used_cpu is not None:
            oprot.writeFieldBegin('used_cpu', TType.DOUBLE, 9)
            oprot.writeDouble(self.used_cpu)
            oprot.writeFieldEnd()
        if self.fragmented_mem is not None:
            oprot.writeFieldBegin('fragmented_mem', TType.DOUBLE, 10)
            oprot.writeDouble(self.fragmented_mem)
            oprot.writeFieldEnd()
        if self.fragmented_cpu is not None:
            oprot.writeFieldBegin('fragmented_cpu', TType.DOUBLE, 11)
            oprot.writeDouble(self.fragmented_cpu)
            oprot.writeFieldEnd()
        if self.blacklisted is not None:
            oprot.writeFieldBegin('blacklisted', TType.BOOL, 12)
            oprot.writeBool(self.blacklisted)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is still None."""
        if self.host is None:
            raise TProtocolException(message='Required field host is unset!')
        if self.uptime_secs is None:
            raise TProtocolException(message='Required field uptime_secs is unset!')
        if self.num_workers is None:
            raise TProtocolException(message='Required field num_workers is unset!')
        if self.num_used_workers is None:
            raise TProtocolException(message='Required field num_used_workers is unset!')
        if self.supervisor_id is None:
            raise TProtocolException(message='Required field supervisor_id is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class NimbusSummary(object):
    """
    Attributes:
     - host
     - port
     - uptime_secs
     - isLeader
     - version
    """

    def __init__(self, host=None, port=None, uptime_secs=None, isLeader=None, version=None,):
        self.host = host
        self.port = port
        self.uptime_secs = uptime_secs
        self.isLeader = isLeader
        self.version = version

    def read(self, iprot):
        # Hand off to the accelerated decoder when transport and spec allow.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                raw = iprot.readString()
                self.host = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif fid == 2 and ftype == TType.I32:
                self.port = iprot.readI32()
            elif fid == 3 and ftype == TType.I32:
                self.uptime_secs = iprot.readI32()
            elif fid == 4 and ftype == TType.BOOL:
                self.isLeader = iprot.readBool()
            elif fid == 5 and ftype == TType.STRING:
                raw = iprot.readString()
                self.version = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            else:
                # Unknown id or wrong wire type: skip for compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Hand off to the accelerated encoder when possible.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('NimbusSummary')
        if self.host is not None:
            oprot.writeFieldBegin('host', TType.STRING, 1)
            host_out = self.host.encode('utf-8') if sys.version_info[0] == 2 else self.host
            oprot.writeString(host_out)
            oprot.writeFieldEnd()
        if self.port is not None:
            oprot.writeFieldBegin('port', TType.I32, 2)
            oprot.writeI32(self.port)
            oprot.writeFieldEnd()
        if self.uptime_secs is not None:
            oprot.writeFieldBegin('uptime_secs', TType.I32, 3)
            oprot.writeI32(self.uptime_secs)
            oprot.writeFieldEnd()
        if self.isLeader is not None:
            oprot.writeFieldBegin('isLeader', TType.BOOL, 4)
            oprot.writeBool(self.isLeader)
            oprot.writeFieldEnd()
        if self.version is not None:
            oprot.writeFieldBegin('version', TType.STRING, 5)
            version_out = self.version.encode('utf-8') if sys.version_info[0] == 2 else self.version
            oprot.writeString(version_out)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Every field of this struct is required.
        if self.host is None:
            raise TProtocolException(message='Required field host is unset!')
        if self.port is None:
            raise TProtocolException(message='Required field port is unset!')
        if self.uptime_secs is None:
            raise TProtocolException(message='Required field uptime_secs is unset!')
        if self.isLeader is None:
            raise TProtocolException(message='Required field isLeader is unset!')
        if self.version is None:
            raise TProtocolException(message='Required field version is unset!')
        return

    def __repr__(self):
        rendered = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, rendered)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class ClusterSummary(object):
    """
    Attributes:
     - supervisors
     - topologies
     - nimbuses
    """

    def __init__(self, supervisors=None, topologies=None, nimbuses=None,):
        self.supervisors = supervisors
        self.topologies = topologies
        self.nimbuses = nimbuses

    def read(self, iprot):
        # Accelerated decode path when the C extension is usable.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # Field id 2 is intentionally absent from the dispatch below.
            if fid == 1 and ftype == TType.LIST:
                self.supervisors = []
                (_etype, list_len) = iprot.readListBegin()
                for _ in range(list_len):
                    item = SupervisorSummary()
                    item.read(iprot)
                    self.supervisors.append(item)
                iprot.readListEnd()
            elif fid == 3 and ftype == TType.LIST:
                self.topologies = []
                (_etype, list_len) = iprot.readListBegin()
                for _ in range(list_len):
                    item = TopologySummary()
                    item.read(iprot)
                    self.topologies.append(item)
                iprot.readListEnd()
            elif fid == 4 and ftype == TType.LIST:
                self.nimbuses = []
                (_etype, list_len) = iprot.readListBegin()
                for _ in range(list_len):
                    item = NimbusSummary()
                    item.read(iprot)
                    self.nimbuses.append(item)
                iprot.readListEnd()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Accelerated encode path when the C extension is usable.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ClusterSummary')
        if self.supervisors is not None:
            oprot.writeFieldBegin('supervisors', TType.LIST, 1)
            oprot.writeListBegin(TType.STRUCT, len(self.supervisors))
            for entry in self.supervisors:
                entry.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.topologies is not None:
            oprot.writeFieldBegin('topologies', TType.LIST, 3)
            oprot.writeListBegin(TType.STRUCT, len(self.topologies))
            for entry in self.topologies:
                entry.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.nimbuses is not None:
            oprot.writeFieldBegin('nimbuses', TType.LIST, 4)
            oprot.writeListBegin(TType.STRUCT, len(self.nimbuses))
            for entry in self.nimbuses:
                entry.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.supervisors is None:
            raise TProtocolException(message='Required field supervisors is unset!')
        if self.topologies is None:
            raise TProtocolException(message='Required field topologies is unset!')
        if self.nimbuses is None:
            raise TProtocolException(message='Required field nimbuses is unset!')
        return

    def __repr__(self):
        rendered = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, rendered)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class ErrorInfo(object):
    """
    Attributes:
     - error
     - error_time_secs
     - host
     - port
    """

    def __init__(self, error=None, error_time_secs=None, host=None, port=None,):
        self.error = error
        self.error_time_secs = error_time_secs
        self.host = host
        self.port = port

    def read(self, iprot):
        # Use the compiled decoder when transport and spec permit.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (_, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                raw = iprot.readString()
                self.error = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif fid == 2 and ftype == TType.I32:
                self.error_time_secs = iprot.readI32()
            elif fid == 3 and ftype == TType.STRING:
                raw = iprot.readString()
                self.host = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif fid == 4 and ftype == TType.I32:
                self.port = iprot.readI32()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Use the compiled encoder when a thrift_spec is present.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ErrorInfo')
        if self.error is not None:
            oprot.writeFieldBegin('error', TType.STRING, 1)
            error_out = self.error.encode('utf-8') if sys.version_info[0] == 2 else self.error
            oprot.writeString(error_out)
            oprot.writeFieldEnd()
        if self.error_time_secs is not None:
            oprot.writeFieldBegin('error_time_secs', TType.I32, 2)
            oprot.writeI32(self.error_time_secs)
            oprot.writeFieldEnd()
        if self.host is not None:
            oprot.writeFieldBegin('host', TType.STRING, 3)
            host_out = self.host.encode('utf-8') if sys.version_info[0] == 2 else self.host
            oprot.writeString(host_out)
            oprot.writeFieldEnd()
        if self.port is not None:
            oprot.writeFieldBegin('port', TType.I32, 4)
            oprot.writeI32(self.port)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Only error and error_time_secs are required; host/port are optional.
        if self.error is None:
            raise TProtocolException(message='Required field error is unset!')
        if self.error_time_secs is None:
            raise TProtocolException(message='Required field error_time_secs is unset!')
        return

    def __repr__(self):
        rendered = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, rendered)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class BoltStats(object):
"""
Attributes:
- acked
- failed
- process_ms_avg
- executed
- execute_ms_avg
"""
def __init__(self, acked=None, failed=None, process_ms_avg=None, executed=None, execute_ms_avg=None,):
self.acked = acked
self.failed = failed
self.process_ms_avg = process_ms_avg
self.executed = executed
self.execute_ms_avg = execute_ms_avg
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.acked = {}
(_ktype143, _vtype144, _size142) = iprot.readMapBegin()
for _i146 in range(_size142):
_key147 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val148 = {}
(_ktype150, _vtype151, _size149) = iprot.readMapBegin()
for _i153 in range(_size149):
_key154 = GlobalStreamId()
_key154.read(iprot)
_val155 = iprot.readI64()
_val148[_key154] = _val155
iprot.readMapEnd()
self.acked[_key147] = _val148
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.MAP:
self.failed = {}
(_ktype157, _vtype158, _size156) = iprot.readMapBegin()
for _i160 in range(_size156):
_key161 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val162 = {}
(_ktype164, _vtype165, _size163) = iprot.readMapBegin()
for _i167 in range(_size163):
_key168 = GlobalStreamId()
_key168.read(iprot)
_val169 = iprot.readI64()
_val162[_key168] = _val169
iprot.readMapEnd()
self.failed[_key161] = _val162
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.MAP:
self.process_ms_avg = {}
(_ktype171, _vtype172, _size170) = iprot.readMapBegin()
for _i174 in range(_size170):
_key175 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val176 = {}
(_ktype178, _vtype179, _size177) = iprot.readMapBegin()
for _i181 in range(_size177):
_key182 = GlobalStreamId()
_key182.read(iprot)
_val183 = iprot.readDouble()
_val176[_key182] = _val183
iprot.readMapEnd()
self.process_ms_avg[_key175] = _val176
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.MAP:
self.executed = {}
(_ktype185, _vtype186, _size184) = iprot.readMapBegin()
for _i188 in range(_size184):
_key189 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val190 = {}
(_ktype192, _vtype193, _size191) = iprot.readMapBegin()
for _i195 in range(_size191):
_key196 = GlobalStreamId()
_key196.read(iprot)
_val197 = iprot.readI64()
_val190[_key196] = _val197
iprot.readMapEnd()
self.executed[_key189] = _val190
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.MAP:
self.execute_ms_avg = {}
(_ktype199, _vtype200, _size198) = iprot.readMapBegin()
for _i202 in range(_size198):
_key203 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val204 = {}
(_ktype206, _vtype207, _size205) = iprot.readMapBegin()
for _i209 in range(_size205):
_key210 = GlobalStreamId()
_key210.read(iprot)
_val211 = iprot.readDouble()
_val204[_key210] = _val211
iprot.readMapEnd()
self.execute_ms_avg[_key203] = _val204
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('BoltStats')
if self.acked is not None:
oprot.writeFieldBegin('acked', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.acked))
for kiter212, viter213 in self.acked.items():
oprot.writeString(kiter212.encode('utf-8') if sys.version_info[0] == 2 else kiter212)
oprot.writeMapBegin(TType.STRUCT, TType.I64, len(viter213))
for kiter214, viter215 in viter213.items():
kiter214.write(oprot)
oprot.writeI64(viter215)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.failed is not None:
oprot.writeFieldBegin('failed', TType.MAP, 2)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.failed))
for kiter216, viter217 in self.failed.items():
oprot.writeString(kiter216.encode('utf-8') if sys.version_info[0] == 2 else kiter216)
oprot.writeMapBegin(TType.STRUCT, TType.I64, len(viter217))
for kiter218, viter219 in viter217.items():
kiter218.write(oprot)
oprot.writeI64(viter219)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.process_ms_avg is not None:
oprot.writeFieldBegin('process_ms_avg', TType.MAP, 3)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.process_ms_avg))
for kiter220, viter221 in self.process_ms_avg.items():
oprot.writeString(kiter220.encode('utf-8') if sys.version_info[0] == 2 else kiter220)
oprot.writeMapBegin(TType.STRUCT, TType.DOUBLE, len(viter221))
for kiter222, viter223 in viter221.items():
kiter222.write(oprot)
oprot.writeDouble(viter223)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.executed is not None:
oprot.writeFieldBegin('executed', TType.MAP, 4)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.executed))
for kiter224, viter225 in self.executed.items():
oprot.writeString(kiter224.encode('utf-8') if sys.version_info[0] == 2 else kiter224)
oprot.writeMapBegin(TType.STRUCT, TType.I64, len(viter225))
for kiter226, viter227 in viter225.items():
kiter226.write(oprot)
oprot.writeI64(viter227)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.execute_ms_avg is not None:
oprot.writeFieldBegin('execute_ms_avg', TType.MAP, 5)
oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.execute_ms_avg))
for kiter228, viter229 in self.execute_ms_avg.items():
oprot.writeString(kiter228.encode('utf-8') if sys.version_info[0] == 2 else kiter228)
oprot.writeMapBegin(TType.STRUCT, TType.DOUBLE, len(viter229))
for kiter230, viter231 in viter229.items():
kiter230.write(oprot)
oprot.writeDouble(viter231)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
    """Raise TProtocolException if any required field is still None.

    Field order matches the generated original so the first missing
    field reported is the same.
    """
    for required in ('acked', 'failed', 'process_ms_avg', 'executed', 'execute_ms_avg'):
        if getattr(self, required) is None:
            raise TProtocolException(message='Required field %s is unset!' % required)
    return
def __repr__(self):
    """Return a debug string listing every attribute as name=value."""
    pairs = ', '.join('%s=%r' % item for item in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, pairs)
def __eq__(self, other):
    """Equal iff other is the same class and all attributes match."""
    if not isinstance(other, self.__class__):
        return False
    return self.__dict__ == other.__dict__
def __ne__(self, other):
    """Logical negation of __eq__ (needed explicitly on Python 2)."""
    equal = (self == other)
    return not equal
class SpoutStats(object):
    """Thrift-generated struct of per-spout statistics.

    All three attributes are two-level string-keyed maps, as established
    by the protocol calls in read()/write() below.  The meaning of the
    outer and inner keys is not visible here (presumably time window and
    stream id — confirm against the .thrift definition).

    Attributes:
     - acked: dict of str -> dict of str -> int (i64 on the wire); required
     - failed: dict of str -> dict of str -> int (i64 on the wire); required
     - complete_ms_avg: dict of str -> dict of str -> float (double); required
    """

    def __init__(self, acked=None, failed=None, complete_ms_avg=None,):
        self.acked = acked
        self.failed = failed
        self.complete_ms_avg = complete_ms_avg

    def read(self, iprot):
        """Deserialize this struct from iprot, preferring the C fast path."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                # field 1: acked — map<string, map<string, i64>>
                if ftype == TType.MAP:
                    self.acked = {}
                    (_ktype233, _vtype234, _size232) = iprot.readMapBegin()
                    for _i236 in range(_size232):
                        # py2 returns bytes from readString; decode there only
                        _key237 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val238 = {}
                        (_ktype240, _vtype241, _size239) = iprot.readMapBegin()
                        for _i243 in range(_size239):
                            _key244 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                            _val245 = iprot.readI64()
                            _val238[_key244] = _val245
                        iprot.readMapEnd()
                        self.acked[_key237] = _val238
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                # field 2: failed — map<string, map<string, i64>>
                if ftype == TType.MAP:
                    self.failed = {}
                    (_ktype247, _vtype248, _size246) = iprot.readMapBegin()
                    for _i250 in range(_size246):
                        _key251 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val252 = {}
                        (_ktype254, _vtype255, _size253) = iprot.readMapBegin()
                        for _i257 in range(_size253):
                            _key258 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                            _val259 = iprot.readI64()
                            _val252[_key258] = _val259
                        iprot.readMapEnd()
                        self.failed[_key251] = _val252
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                # field 3: complete_ms_avg — map<string, map<string, double>>
                if ftype == TType.MAP:
                    self.complete_ms_avg = {}
                    (_ktype261, _vtype262, _size260) = iprot.readMapBegin()
                    for _i264 in range(_size260):
                        _key265 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val266 = {}
                        (_ktype268, _vtype269, _size267) = iprot.readMapBegin()
                        for _i271 in range(_size267):
                            _key272 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                            _val273 = iprot.readDouble()
                            _val266[_key272] = _val273
                        iprot.readMapEnd()
                        self.complete_ms_avg[_key265] = _val266
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # unknown field id: skip so newer peers stay compatible
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize set (non-None) fields to oprot, mirroring read()."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SpoutStats')
        if self.acked is not None:
            oprot.writeFieldBegin('acked', TType.MAP, 1)
            oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.acked))
            for kiter274, viter275 in self.acked.items():
                oprot.writeString(kiter274.encode('utf-8') if sys.version_info[0] == 2 else kiter274)
                oprot.writeMapBegin(TType.STRING, TType.I64, len(viter275))
                for kiter276, viter277 in viter275.items():
                    oprot.writeString(kiter276.encode('utf-8') if sys.version_info[0] == 2 else kiter276)
                    oprot.writeI64(viter277)
                oprot.writeMapEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.failed is not None:
            oprot.writeFieldBegin('failed', TType.MAP, 2)
            oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.failed))
            for kiter278, viter279 in self.failed.items():
                oprot.writeString(kiter278.encode('utf-8') if sys.version_info[0] == 2 else kiter278)
                oprot.writeMapBegin(TType.STRING, TType.I64, len(viter279))
                for kiter280, viter281 in viter279.items():
                    oprot.writeString(kiter280.encode('utf-8') if sys.version_info[0] == 2 else kiter280)
                    oprot.writeI64(viter281)
                oprot.writeMapEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.complete_ms_avg is not None:
            oprot.writeFieldBegin('complete_ms_avg', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.complete_ms_avg))
            for kiter282, viter283 in self.complete_ms_avg.items():
                oprot.writeString(kiter282.encode('utf-8') if sys.version_info[0] == 2 else kiter282)
                oprot.writeMapBegin(TType.STRING, TType.DOUBLE, len(viter283))
                for kiter284, viter285 in viter283.items():
                    oprot.writeString(kiter284.encode('utf-8') if sys.version_info[0] == 2 else kiter284)
                    oprot.writeDouble(viter285)
                oprot.writeMapEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset."""
        if self.acked is None:
            raise TProtocolException(message='Required field acked is unset!')
        if self.failed is None:
            raise TProtocolException(message='Required field failed is unset!')
        if self.complete_ms_avg is None:
            raise TProtocolException(message='Required field complete_ms_avg is unset!')
        return

    def __repr__(self):
        # name=value pairs for every attribute, for debugging
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # value equality: same class and identical attribute dicts
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ExecutorSpecificStats(object):
    """Union-like struct holding either bolt-side or spout-side stats.

    Attributes:
     - bolt: BoltStats struct (optional)
     - spout: SpoutStats struct (optional)
    """

    def __init__(self, bolt=None, spout=None,):
        self.bolt = bolt
        self.spout = spout

    def read(self, iprot):
        """Populate this struct from the given input protocol."""
        can_fast = (iprot._fast_decode is not None
                    and isinstance(iprot.trans, TTransport.CReadableTransport)
                    and self.thrift_spec is not None)
        if can_fast:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                # field 1: bolt stats
                self.bolt = BoltStats()
                self.bolt.read(iprot)
            elif fid == 2 and ftype == TType.STRUCT:
                # field 2: spout stats
                self.spout = SpoutStats()
                self.spout.read(iprot)
            else:
                # wrong type or unknown field id: skip it
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize set (non-None) fields to the given output protocol."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ExecutorSpecificStats')
        for field_id, field_name in ((1, 'bolt'), (2, 'spout')):
            member = getattr(self, field_name)
            if member is not None:
                oprot.writeFieldBegin(field_name, TType.STRUCT, field_id)
                member.write(oprot)
                oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields; always succeeds."""
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ExecutorStats(object):
    """Thrift-generated struct of per-executor throughput statistics.

    emitted/transferred are two-level string-keyed maps of i64 counts
    (key semantics not visible here — presumably window and stream id;
    confirm against the .thrift definition).

    Attributes:
     - emitted: dict of str -> dict of str -> int; required
     - transferred: dict of str -> dict of str -> int; required
     - specific: ExecutorSpecificStats (bolt- or spout-specific part); required
     - rate: float sampling rate multiplier — presumed; confirm; required
    """

    def __init__(self, emitted=None, transferred=None, specific=None, rate=None,):
        self.emitted = emitted
        self.transferred = transferred
        self.specific = specific
        self.rate = rate

    def read(self, iprot):
        """Deserialize this struct from iprot, preferring the C fast path."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                # field 1: emitted — map<string, map<string, i64>>
                if ftype == TType.MAP:
                    self.emitted = {}
                    (_ktype287, _vtype288, _size286) = iprot.readMapBegin()
                    for _i290 in range(_size286):
                        _key291 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val292 = {}
                        (_ktype294, _vtype295, _size293) = iprot.readMapBegin()
                        for _i297 in range(_size293):
                            _key298 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                            _val299 = iprot.readI64()
                            _val292[_key298] = _val299
                        iprot.readMapEnd()
                        self.emitted[_key291] = _val292
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                # field 2: transferred — map<string, map<string, i64>>
                if ftype == TType.MAP:
                    self.transferred = {}
                    (_ktype301, _vtype302, _size300) = iprot.readMapBegin()
                    for _i304 in range(_size300):
                        _key305 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val306 = {}
                        (_ktype308, _vtype309, _size307) = iprot.readMapBegin()
                        for _i311 in range(_size307):
                            _key312 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                            _val313 = iprot.readI64()
                            _val306[_key312] = _val313
                        iprot.readMapEnd()
                        self.transferred[_key305] = _val306
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                # field 3: specific — nested ExecutorSpecificStats struct
                if ftype == TType.STRUCT:
                    self.specific = ExecutorSpecificStats()
                    self.specific.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                # field 4: rate — double
                if ftype == TType.DOUBLE:
                    self.rate = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize set (non-None) fields to oprot, mirroring read()."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ExecutorStats')
        if self.emitted is not None:
            oprot.writeFieldBegin('emitted', TType.MAP, 1)
            oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.emitted))
            for kiter314, viter315 in self.emitted.items():
                oprot.writeString(kiter314.encode('utf-8') if sys.version_info[0] == 2 else kiter314)
                oprot.writeMapBegin(TType.STRING, TType.I64, len(viter315))
                for kiter316, viter317 in viter315.items():
                    oprot.writeString(kiter316.encode('utf-8') if sys.version_info[0] == 2 else kiter316)
                    oprot.writeI64(viter317)
                oprot.writeMapEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.transferred is not None:
            oprot.writeFieldBegin('transferred', TType.MAP, 2)
            oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.transferred))
            for kiter318, viter319 in self.transferred.items():
                oprot.writeString(kiter318.encode('utf-8') if sys.version_info[0] == 2 else kiter318)
                oprot.writeMapBegin(TType.STRING, TType.I64, len(viter319))
                for kiter320, viter321 in viter319.items():
                    oprot.writeString(kiter320.encode('utf-8') if sys.version_info[0] == 2 else kiter320)
                    oprot.writeI64(viter321)
                oprot.writeMapEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.specific is not None:
            oprot.writeFieldBegin('specific', TType.STRUCT, 3)
            self.specific.write(oprot)
            oprot.writeFieldEnd()
        if self.rate is not None:
            oprot.writeFieldBegin('rate', TType.DOUBLE, 4)
            oprot.writeDouble(self.rate)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset."""
        if self.emitted is None:
            raise TProtocolException(message='Required field emitted is unset!')
        if self.transferred is None:
            raise TProtocolException(message='Required field transferred is unset!')
        if self.specific is None:
            raise TProtocolException(message='Required field specific is unset!')
        if self.rate is None:
            raise TProtocolException(message='Required field rate is unset!')
        return

    def __repr__(self):
        # name=value pairs for every attribute, for debugging
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # value equality: same class and identical attribute dicts
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ExecutorInfo(object):
    """Pair of i32 task ids delimiting an executor's task range.

    Attributes:
     - task_start: int (i32 on the wire); required
     - task_end: int (i32 on the wire); required
    """

    def __init__(self, task_start=None, task_end=None,):
        self.task_start = task_start
        self.task_end = task_end

    def read(self, iprot):
        """Populate this struct from the given input protocol."""
        can_fast = (iprot._fast_decode is not None
                    and isinstance(iprot.trans, TTransport.CReadableTransport)
                    and self.thrift_spec is not None)
        if can_fast:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.I32:
                self.task_start = iprot.readI32()
            elif fid == 2 and ftype == TType.I32:
                self.task_end = iprot.readI32()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize set (non-None) fields to the given output protocol."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ExecutorInfo')
        for field_id, field_name in ((1, 'task_start'), (2, 'task_end')):
            value = getattr(self, field_name)
            if value is not None:
                oprot.writeFieldBegin(field_name, TType.I32, field_id)
                oprot.writeI32(value)
                oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is missing."""
        for required in ('task_start', 'task_end'):
            if getattr(self, required) is None:
                raise TProtocolException(message='Required field %s is unset!' % required)
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ExecutorSummary(object):
    """Thrift-generated struct summarizing one executor: placement and stats.

    Attributes:
     - executor_info: ExecutorInfo struct; required
     - component_id: str; required
     - host: str; required
     - port: int (i32 on the wire); required
     - uptime_secs: int (i32 on the wire); required
     - stats: ExecutorStats struct; optional (wire field id is 7 — id 6 is unused)
    """

    def __init__(self, executor_info=None, component_id=None, host=None, port=None, uptime_secs=None, stats=None,):
        self.executor_info = executor_info
        self.component_id = component_id
        self.host = host
        self.port = port
        self.uptime_secs = uptime_secs
        self.stats = stats

    def read(self, iprot):
        """Deserialize this struct from iprot, preferring the C fast path."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                # field 1: executor_info — nested struct
                if ftype == TType.STRUCT:
                    self.executor_info = ExecutorInfo()
                    self.executor_info.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                # field 2: component_id — string (py2 needs an explicit decode)
                if ftype == TType.STRING:
                    self.component_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                # field 3: host — string
                if ftype == TType.STRING:
                    self.host = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                # field 4: port — i32
                if ftype == TType.I32:
                    self.port = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                # field 5: uptime_secs — i32
                if ftype == TType.I32:
                    self.uptime_secs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                # field 7: stats — nested struct (field id 6 is not used)
                if ftype == TType.STRUCT:
                    self.stats = ExecutorStats()
                    self.stats.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize set (non-None) fields to oprot, mirroring read()."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ExecutorSummary')
        if self.executor_info is not None:
            oprot.writeFieldBegin('executor_info', TType.STRUCT, 1)
            self.executor_info.write(oprot)
            oprot.writeFieldEnd()
        if self.component_id is not None:
            oprot.writeFieldBegin('component_id', TType.STRING, 2)
            oprot.writeString(self.component_id.encode('utf-8') if sys.version_info[0] == 2 else self.component_id)
            oprot.writeFieldEnd()
        if self.host is not None:
            oprot.writeFieldBegin('host', TType.STRING, 3)
            oprot.writeString(self.host.encode('utf-8') if sys.version_info[0] == 2 else self.host)
            oprot.writeFieldEnd()
        if self.port is not None:
            oprot.writeFieldBegin('port', TType.I32, 4)
            oprot.writeI32(self.port)
            oprot.writeFieldEnd()
        if self.uptime_secs is not None:
            oprot.writeFieldBegin('uptime_secs', TType.I32, 5)
            oprot.writeI32(self.uptime_secs)
            oprot.writeFieldEnd()
        if self.stats is not None:
            oprot.writeFieldBegin('stats', TType.STRUCT, 7)
            self.stats.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset (stats is optional)."""
        if self.executor_info is None:
            raise TProtocolException(message='Required field executor_info is unset!')
        if self.component_id is None:
            raise TProtocolException(message='Required field component_id is unset!')
        if self.host is None:
            raise TProtocolException(message='Required field host is unset!')
        if self.port is None:
            raise TProtocolException(message='Required field port is unset!')
        if self.uptime_secs is None:
            raise TProtocolException(message='Required field uptime_secs is unset!')
        return

    def __repr__(self):
        # name=value pairs for every attribute, for debugging
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # value equality: same class and identical attribute dicts
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class DebugOptions(object):
    """Debug configuration flags carried over the wire.

    Attributes:
     - enable: bool; optional
     - samplingpct: float (double on the wire; name suggests a sampling
       percentage — confirm against the .thrift definition); optional
    """

    def __init__(self, enable=None, samplingpct=None,):
        self.enable = enable
        self.samplingpct = samplingpct

    def read(self, iprot):
        """Populate this struct from the given input protocol."""
        can_fast = (iprot._fast_decode is not None
                    and isinstance(iprot.trans, TTransport.CReadableTransport)
                    and self.thrift_spec is not None)
        if can_fast:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.BOOL:
                self.enable = iprot.readBool()
            elif fid == 2 and ftype == TType.DOUBLE:
                self.samplingpct = iprot.readDouble()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize set (non-None) fields to the given output protocol."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('DebugOptions')
        if self.enable is not None:
            oprot.writeFieldBegin('enable', TType.BOOL, 1)
            oprot.writeBool(self.enable)
            oprot.writeFieldEnd()
        if self.samplingpct is not None:
            oprot.writeFieldBegin('samplingpct', TType.DOUBLE, 2)
            oprot.writeDouble(self.samplingpct)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields; always succeeds."""
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TopologyInfo(object):
    """Thrift-generated struct describing a running topology.

    Fields 1-8 are the core description; fields with ids >= 513 are
    optional extensions (scheduler status, ownership, resource figures).
    The large id gap is presumably deliberate in the .thrift definition
    to leave room for future core fields — do not renumber.

    Attributes:
     - id: str; required
     - name: str; required
     - uptime_secs: int (i32); required
     - executors: list of ExecutorSummary; required
     - status: str; required
     - errors: dict of str -> list of ErrorInfo; required
     - component_debug: dict of str -> DebugOptions; optional
     - storm_version: str; optional
     - sched_status: str; optional (field id 513)
     - owner: str; optional (field id 514)
     - replication_count: int (i32); optional (field id 515)
     - requested_memonheap: float (double); optional (field id 521)
     - requested_memoffheap: float (double); optional (field id 522)
     - requested_cpu: float (double); optional (field id 523)
     - assigned_memonheap: float (double); optional (field id 524)
     - assigned_memoffheap: float (double); optional (field id 525)
     - assigned_cpu: float (double); optional (field id 526)
    """

    def __init__(self, id=None, name=None, uptime_secs=None, executors=None, status=None, errors=None, component_debug=None, storm_version=None, sched_status=None, owner=None, replication_count=None, requested_memonheap=None, requested_memoffheap=None, requested_cpu=None, assigned_memonheap=None, assigned_memoffheap=None, assigned_cpu=None,):
        self.id = id
        self.name = name
        self.uptime_secs = uptime_secs
        self.executors = executors
        self.status = status
        self.errors = errors
        self.component_debug = component_debug
        self.storm_version = storm_version
        self.sched_status = sched_status
        self.owner = owner
        self.replication_count = replication_count
        self.requested_memonheap = requested_memonheap
        self.requested_memoffheap = requested_memoffheap
        self.requested_cpu = requested_cpu
        self.assigned_memonheap = assigned_memonheap
        self.assigned_memoffheap = assigned_memoffheap
        self.assigned_cpu = assigned_cpu

    def read(self, iprot):
        """Deserialize this struct from iprot, preferring the C fast path."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                # field 1: id — string (py2 needs an explicit decode)
                if ftype == TType.STRING:
                    self.id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                # field 2: name — string
                if ftype == TType.STRING:
                    self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                # field 3: uptime_secs — i32
                if ftype == TType.I32:
                    self.uptime_secs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                # field 4: executors — list<ExecutorSummary>
                if ftype == TType.LIST:
                    self.executors = []
                    (_etype325, _size322) = iprot.readListBegin()
                    for _i326 in range(_size322):
                        _elem327 = ExecutorSummary()
                        _elem327.read(iprot)
                        self.executors.append(_elem327)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                # field 5: status — string
                if ftype == TType.STRING:
                    self.status = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                # field 6: errors — map<string, list<ErrorInfo>>
                if ftype == TType.MAP:
                    self.errors = {}
                    (_ktype329, _vtype330, _size328) = iprot.readMapBegin()
                    for _i332 in range(_size328):
                        _key333 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val334 = []
                        (_etype338, _size335) = iprot.readListBegin()
                        for _i339 in range(_size335):
                            _elem340 = ErrorInfo()
                            _elem340.read(iprot)
                            _val334.append(_elem340)
                        iprot.readListEnd()
                        self.errors[_key333] = _val334
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                # field 7: component_debug — map<string, DebugOptions>
                if ftype == TType.MAP:
                    self.component_debug = {}
                    (_ktype342, _vtype343, _size341) = iprot.readMapBegin()
                    for _i345 in range(_size341):
                        _key346 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val347 = DebugOptions()
                        _val347.read(iprot)
                        self.component_debug[_key346] = _val347
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                # field 8: storm_version — string
                if ftype == TType.STRING:
                    self.storm_version = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 513:
                # optional extension fields start at id 513
                if ftype == TType.STRING:
                    self.sched_status = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 514:
                if ftype == TType.STRING:
                    self.owner = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 515:
                if ftype == TType.I32:
                    self.replication_count = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 521:
                # resource figures (doubles), ids 521-526
                if ftype == TType.DOUBLE:
                    self.requested_memonheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 522:
                if ftype == TType.DOUBLE:
                    self.requested_memoffheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 523:
                if ftype == TType.DOUBLE:
                    self.requested_cpu = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 524:
                if ftype == TType.DOUBLE:
                    self.assigned_memonheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 525:
                if ftype == TType.DOUBLE:
                    self.assigned_memoffheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 526:
                if ftype == TType.DOUBLE:
                    self.assigned_cpu = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize set (non-None) fields to oprot, mirroring read()."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('TopologyInfo')
        if self.id is not None:
            oprot.writeFieldBegin('id', TType.STRING, 1)
            oprot.writeString(self.id.encode('utf-8') if sys.version_info[0] == 2 else self.id)
            oprot.writeFieldEnd()
        if self.name is not None:
            oprot.writeFieldBegin('name', TType.STRING, 2)
            oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
            oprot.writeFieldEnd()
        if self.uptime_secs is not None:
            oprot.writeFieldBegin('uptime_secs', TType.I32, 3)
            oprot.writeI32(self.uptime_secs)
            oprot.writeFieldEnd()
        if self.executors is not None:
            oprot.writeFieldBegin('executors', TType.LIST, 4)
            oprot.writeListBegin(TType.STRUCT, len(self.executors))
            for iter348 in self.executors:
                iter348.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.status is not None:
            oprot.writeFieldBegin('status', TType.STRING, 5)
            oprot.writeString(self.status.encode('utf-8') if sys.version_info[0] == 2 else self.status)
            oprot.writeFieldEnd()
        if self.errors is not None:
            oprot.writeFieldBegin('errors', TType.MAP, 6)
            oprot.writeMapBegin(TType.STRING, TType.LIST, len(self.errors))
            for kiter349, viter350 in self.errors.items():
                oprot.writeString(kiter349.encode('utf-8') if sys.version_info[0] == 2 else kiter349)
                oprot.writeListBegin(TType.STRUCT, len(viter350))
                for iter351 in viter350:
                    iter351.write(oprot)
                oprot.writeListEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.component_debug is not None:
            oprot.writeFieldBegin('component_debug', TType.MAP, 7)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.component_debug))
            for kiter352, viter353 in self.component_debug.items():
                oprot.writeString(kiter352.encode('utf-8') if sys.version_info[0] == 2 else kiter352)
                viter353.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.storm_version is not None:
            oprot.writeFieldBegin('storm_version', TType.STRING, 8)
            oprot.writeString(self.storm_version.encode('utf-8') if sys.version_info[0] == 2 else self.storm_version)
            oprot.writeFieldEnd()
        if self.sched_status is not None:
            oprot.writeFieldBegin('sched_status', TType.STRING, 513)
            oprot.writeString(self.sched_status.encode('utf-8') if sys.version_info[0] == 2 else self.sched_status)
            oprot.writeFieldEnd()
        if self.owner is not None:
            oprot.writeFieldBegin('owner', TType.STRING, 514)
            oprot.writeString(self.owner.encode('utf-8') if sys.version_info[0] == 2 else self.owner)
            oprot.writeFieldEnd()
        if self.replication_count is not None:
            oprot.writeFieldBegin('replication_count', TType.I32, 515)
            oprot.writeI32(self.replication_count)
            oprot.writeFieldEnd()
        if self.requested_memonheap is not None:
            oprot.writeFieldBegin('requested_memonheap', TType.DOUBLE, 521)
            oprot.writeDouble(self.requested_memonheap)
            oprot.writeFieldEnd()
        if self.requested_memoffheap is not None:
            oprot.writeFieldBegin('requested_memoffheap', TType.DOUBLE, 522)
            oprot.writeDouble(self.requested_memoffheap)
            oprot.writeFieldEnd()
        if self.requested_cpu is not None:
            oprot.writeFieldBegin('requested_cpu', TType.DOUBLE, 523)
            oprot.writeDouble(self.requested_cpu)
            oprot.writeFieldEnd()
        if self.assigned_memonheap is not None:
            oprot.writeFieldBegin('assigned_memonheap', TType.DOUBLE, 524)
            oprot.writeDouble(self.assigned_memonheap)
            oprot.writeFieldEnd()
        if self.assigned_memoffheap is not None:
            oprot.writeFieldBegin('assigned_memoffheap', TType.DOUBLE, 525)
            oprot.writeDouble(self.assigned_memoffheap)
            oprot.writeFieldEnd()
        if self.assigned_cpu is not None:
            oprot.writeFieldBegin('assigned_cpu', TType.DOUBLE, 526)
            oprot.writeDouble(self.assigned_cpu)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field (ids 1-6) is unset."""
        if self.id is None:
            raise TProtocolException(message='Required field id is unset!')
        if self.name is None:
            raise TProtocolException(message='Required field name is unset!')
        if self.uptime_secs is None:
            raise TProtocolException(message='Required field uptime_secs is unset!')
        if self.executors is None:
            raise TProtocolException(message='Required field executors is unset!')
        if self.status is None:
            raise TProtocolException(message='Required field status is unset!')
        if self.errors is None:
            raise TProtocolException(message='Required field errors is unset!')
        return

    def __repr__(self):
        # name=value pairs for every attribute, for debugging
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        # value equality: same class and identical attribute dicts
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class CommonAggregateStats(object):
    """Aggregate counters shared by bolt and spout summaries.

    Attributes:
     - num_executors: int (i32); optional
     - num_tasks: int (i32); optional
     - emitted: int (i64); optional
     - transferred: int (i64); optional
     - acked: int (i64); optional
     - failed: int (i64); optional
     - resources_map: dict of str -> float (double); optional
    """

    # (name, field id, wire type tag name) for the six scalar fields,
    # in wire order; used by write() below.
    _SCALAR_FIELDS = (
        ('num_executors', 1, 'I32'),
        ('num_tasks', 2, 'I32'),
        ('emitted', 3, 'I64'),
        ('transferred', 4, 'I64'),
        ('acked', 5, 'I64'),
        ('failed', 6, 'I64'),
    )

    def __init__(self, num_executors=None, num_tasks=None, emitted=None, transferred=None, acked=None, failed=None, resources_map=None,):
        self.num_executors = num_executors
        self.num_tasks = num_tasks
        self.emitted = emitted
        self.transferred = transferred
        self.acked = acked
        self.failed = failed
        self.resources_map = resources_map

    def read(self, iprot):
        """Populate this struct from the given input protocol."""
        can_fast = (iprot._fast_decode is not None
                    and isinstance(iprot.trans, TTransport.CReadableTransport)
                    and self.thrift_spec is not None)
        if can_fast:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.I32:
                self.num_executors = iprot.readI32()
            elif fid == 2 and ftype == TType.I32:
                self.num_tasks = iprot.readI32()
            elif fid == 3 and ftype == TType.I64:
                self.emitted = iprot.readI64()
            elif fid == 4 and ftype == TType.I64:
                self.transferred = iprot.readI64()
            elif fid == 5 and ftype == TType.I64:
                self.acked = iprot.readI64()
            elif fid == 6 and ftype == TType.I64:
                self.failed = iprot.readI64()
            elif fid == 7 and ftype == TType.MAP:
                # field 7: resources_map — map<string, double>
                self.resources_map = {}
                (_ktype, _vtype, _count) = iprot.readMapBegin()
                for _ in range(_count):
                    rkey = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                    self.resources_map[rkey] = iprot.readDouble()
                iprot.readMapEnd()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize set (non-None) fields to the given output protocol."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('CommonAggregateStats')
        for field_name, field_id, tag in self._SCALAR_FIELDS:
            value = getattr(self, field_name)
            if value is not None:
                oprot.writeFieldBegin(field_name, getattr(TType, tag), field_id)
                if tag == 'I32':
                    oprot.writeI32(value)
                else:
                    oprot.writeI64(value)
                oprot.writeFieldEnd()
        if self.resources_map is not None:
            oprot.writeFieldBegin('resources_map', TType.MAP, 7)
            oprot.writeMapBegin(TType.STRING, TType.DOUBLE, len(self.resources_map))
            for rkey, rval in self.resources_map.items():
                oprot.writeString(rkey.encode('utf-8') if sys.version_info[0] == 2 else rkey)
                oprot.writeDouble(rval)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields; always succeeds."""
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class SpoutAggregateStats(object):
    """Spout-only aggregate: average complete latency in milliseconds.

    Attributes:
     - complete_latency_ms: float (double on the wire); optional
    """

    def __init__(self, complete_latency_ms=None,):
        self.complete_latency_ms = complete_latency_ms

    def read(self, iprot):
        """Populate this struct from the given input protocol."""
        can_fast = (iprot._fast_decode is not None
                    and isinstance(iprot.trans, TTransport.CReadableTransport)
                    and self.thrift_spec is not None)
        if can_fast:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.DOUBLE:
                self.complete_latency_ms = iprot.readDouble()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SpoutAggregateStats')
        if self.complete_latency_ms is not None:
            oprot.writeFieldBegin('complete_latency_ms', TType.DOUBLE, 1)
            oprot.writeDouble(self.complete_latency_ms)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields; always succeeds."""
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class BoltAggregateStats(object):
    """Thrift struct of bolt-specific aggregate metrics.

    Attributes:
     - execute_latency_ms
     - process_latency_ms
     - executed
     - capacity
    """

    def __init__(self, execute_latency_ms=None, process_latency_ms=None, executed=None, capacity=None,):
        self.execute_latency_ms = execute_latency_ms
        self.process_latency_ms = process_latency_ms
        self.executed = executed
        self.capacity = capacity

    def read(self, iprot):
        """Populate this struct from *iprot*, preferring the native decoder."""
        if (iprot._fast_decode is not None
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None):
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # Match on both field id and wire type; anything else is skipped.
            if fid == 1 and ftype == TType.DOUBLE:
                self.execute_latency_ms = iprot.readDouble()
            elif fid == 2 and ftype == TType.DOUBLE:
                self.process_latency_ms = iprot.readDouble()
            elif fid == 3 and ftype == TType.I64:
                self.executed = iprot.readI64()
            elif fid == 4 and ftype == TType.DOUBLE:
                self.capacity = iprot.readDouble()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, skipping unset (None) fields."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('BoltAggregateStats')
        if self.execute_latency_ms is not None:
            oprot.writeFieldBegin('execute_latency_ms', TType.DOUBLE, 1)
            oprot.writeDouble(self.execute_latency_ms)
            oprot.writeFieldEnd()
        if self.process_latency_ms is not None:
            oprot.writeFieldBegin('process_latency_ms', TType.DOUBLE, 2)
            oprot.writeDouble(self.process_latency_ms)
            oprot.writeFieldEnd()
        if self.executed is not None:
            oprot.writeFieldBegin('executed', TType.I64, 3)
            oprot.writeI64(self.executed)
            oprot.writeFieldEnd()
        if self.capacity is not None:
            oprot.writeFieldBegin('capacity', TType.DOUBLE, 4)
            oprot.writeDouble(self.capacity)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields; nothing to check."""
        return None

    def __repr__(self):
        attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class SpecificAggregateStats(object):
    """Thrift union-style struct: either bolt or spout aggregate stats.

    Attributes:
     - bolt
     - spout
    """

    def __init__(self, bolt=None, spout=None,):
        self.bolt = bolt
        self.spout = spout

    def read(self, iprot):
        """Populate this struct from *iprot*, preferring the native decoder."""
        if (iprot._fast_decode is not None
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None):
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.bolt = BoltAggregateStats()
                self.bolt.read(iprot)
            elif fid == 2 and ftype == TType.STRUCT:
                self.spout = SpoutAggregateStats()
                self.spout.read(iprot)
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, skipping unset (None) fields."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SpecificAggregateStats')
        if self.bolt is not None:
            oprot.writeFieldBegin('bolt', TType.STRUCT, 1)
            self.bolt.write(oprot)
            oprot.writeFieldEnd()
        if self.spout is not None:
            oprot.writeFieldBegin('spout', TType.STRUCT, 2)
            self.spout.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields; nothing to check."""
        return None

    def __repr__(self):
        attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class ComponentAggregateStats(object):
    """Thrift struct of per-component aggregate stats plus the last error.

    Attributes:
     - type
     - common_stats
     - specific_stats
     - last_error
    """

    def __init__(self, type=None, common_stats=None, specific_stats=None, last_error=None,):
        self.type = type
        self.common_stats = common_stats
        self.specific_stats = specific_stats
        self.last_error = last_error

    def read(self, iprot):
        """Populate this struct from *iprot*, preferring the native decoder."""
        if (iprot._fast_decode is not None
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None):
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.I32:
                self.type = iprot.readI32()
            elif fid == 2 and ftype == TType.STRUCT:
                self.common_stats = CommonAggregateStats()
                self.common_stats.read(iprot)
            elif fid == 3 and ftype == TType.STRUCT:
                self.specific_stats = SpecificAggregateStats()
                self.specific_stats.read(iprot)
            elif fid == 4 and ftype == TType.STRUCT:
                self.last_error = ErrorInfo()
                self.last_error.read(iprot)
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, skipping unset (None) fields."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ComponentAggregateStats')
        if self.type is not None:
            oprot.writeFieldBegin('type', TType.I32, 1)
            oprot.writeI32(self.type)
            oprot.writeFieldEnd()
        if self.common_stats is not None:
            oprot.writeFieldBegin('common_stats', TType.STRUCT, 2)
            self.common_stats.write(oprot)
            oprot.writeFieldEnd()
        if self.specific_stats is not None:
            oprot.writeFieldBegin('specific_stats', TType.STRUCT, 3)
            self.specific_stats.write(oprot)
            oprot.writeFieldEnd()
        if self.last_error is not None:
            oprot.writeFieldBegin('last_error', TType.STRUCT, 4)
            self.last_error.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields; nothing to check."""
        return None

    def __repr__(self):
        attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class TopologyStats(object):
    """Thrift struct of topology-level counters keyed by time-window string.

    Every field is a map whose keys are strings (window identifiers) and
    whose values are numeric aggregates.

    Attributes:
     - window_to_emitted
     - window_to_transferred
     - window_to_complete_latencies_ms
     - window_to_acked
     - window_to_failed
    """

    def __init__(self, window_to_emitted=None, window_to_transferred=None, window_to_complete_latencies_ms=None, window_to_acked=None, window_to_failed=None,):
        self.window_to_emitted = window_to_emitted
        self.window_to_transferred = window_to_transferred
        self.window_to_complete_latencies_ms = window_to_complete_latencies_ms
        self.window_to_acked = window_to_acked
        self.window_to_failed = window_to_failed

    def read(self, iprot):
        """Populate this struct from *iprot*, preferring the native decoder."""
        if (iprot._fast_decode is not None
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None):
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return

        def read_window_map(read_value):
            # Shared reader for the map<string, numeric> fields of this struct.
            result = {}
            (_ktype, _vtype, _size) = iprot.readMapBegin()
            for _ in range(_size):
                key = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                result[key] = read_value()
            iprot.readMapEnd()
            return result

        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.MAP:
                self.window_to_emitted = read_window_map(iprot.readI64)
            elif fid == 2 and ftype == TType.MAP:
                self.window_to_transferred = read_window_map(iprot.readI64)
            elif fid == 3 and ftype == TType.MAP:
                self.window_to_complete_latencies_ms = read_window_map(iprot.readDouble)
            elif fid == 4 and ftype == TType.MAP:
                self.window_to_acked = read_window_map(iprot.readI64)
            elif fid == 5 and ftype == TType.MAP:
                self.window_to_failed = read_window_map(iprot.readI64)
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, skipping unset (None) fields."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return

        def write_window_map(field_name, field_id, value_ttype, write_value, mapping):
            # Shared emitter for the map<string, numeric> fields of this struct.
            oprot.writeFieldBegin(field_name, TType.MAP, field_id)
            oprot.writeMapBegin(TType.STRING, value_ttype, len(mapping))
            for key, value in mapping.items():
                oprot.writeString(key.encode('utf-8') if sys.version_info[0] == 2 else key)
                write_value(value)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()

        oprot.writeStructBegin('TopologyStats')
        if self.window_to_emitted is not None:
            write_window_map('window_to_emitted', 1, TType.I64, oprot.writeI64, self.window_to_emitted)
        if self.window_to_transferred is not None:
            write_window_map('window_to_transferred', 2, TType.I64, oprot.writeI64, self.window_to_transferred)
        if self.window_to_complete_latencies_ms is not None:
            write_window_map('window_to_complete_latencies_ms', 3, TType.DOUBLE, oprot.writeDouble, self.window_to_complete_latencies_ms)
        if self.window_to_acked is not None:
            write_window_map('window_to_acked', 4, TType.I64, oprot.writeI64, self.window_to_acked)
        if self.window_to_failed is not None:
            write_window_map('window_to_failed', 5, TType.I64, oprot.writeI64, self.window_to_failed)
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields; nothing to check."""
        return None

    def __repr__(self):
        attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class WorkerSummary(object):
    """Thrift struct describing one worker: location (supervisor/host/port),
    the topology it serves, executor/task load, timing, and requested vs.
    assigned resources.

    Attributes:
     - supervisor_id
     - host
     - port
     - topology_id
     - topology_name
     - num_executors
     - component_to_num_tasks
     - time_secs
     - uptime_secs
     - requested_memonheap
     - requested_memoffheap
     - requested_cpu
     - assigned_memonheap
     - assigned_memoffheap
     - assigned_cpu
     - owner
    """
    def __init__(self, supervisor_id=None, host=None, port=None, topology_id=None, topology_name=None, num_executors=None, component_to_num_tasks=None, time_secs=None, uptime_secs=None, requested_memonheap=None, requested_memoffheap=None, requested_cpu=None, assigned_memonheap=None, assigned_memoffheap=None, assigned_cpu=None, owner=None,):
        """Store all fields; every field is optional and defaults to None."""
        self.supervisor_id = supervisor_id
        self.host = host
        self.port = port
        self.topology_id = topology_id
        self.topology_name = topology_name
        self.num_executors = num_executors
        self.component_to_num_tasks = component_to_num_tasks
        self.time_secs = time_secs
        self.uptime_secs = uptime_secs
        self.requested_memonheap = requested_memonheap
        self.requested_memoffheap = requested_memoffheap
        self.requested_cpu = requested_cpu
        self.assigned_memonheap = assigned_memonheap
        self.assigned_memoffheap = assigned_memoffheap
        self.assigned_cpu = assigned_cpu
        self.owner = owner
    def read(self, iprot):
        """Deserialize this struct from *iprot*.

        Uses the accelerated C decoder when available; otherwise fields are
        matched by (id, wire type) and anything unrecognized is skipped.
        """
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.supervisor_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.host = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.port = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.topology_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.topology_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.I32:
                    self.num_executors = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                # map<string, i64>: component id -> task count.
                if ftype == TType.MAP:
                    self.component_to_num_tasks = {}
                    (_ktype409, _vtype410, _size408) = iprot.readMapBegin()
                    for _i412 in range(_size408):
                        _key413 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val414 = iprot.readI64()
                        self.component_to_num_tasks[_key413] = _val414
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                if ftype == TType.I32:
                    self.time_secs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 9:
                if ftype == TType.I32:
                    self.uptime_secs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            # NOTE(review): field ids jump from 9 to 521+ for the resource and
            # owner fields; the ids are part of the wire format — never renumber.
            elif fid == 521:
                if ftype == TType.DOUBLE:
                    self.requested_memonheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 522:
                if ftype == TType.DOUBLE:
                    self.requested_memoffheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 523:
                if ftype == TType.DOUBLE:
                    self.requested_cpu = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 524:
                if ftype == TType.DOUBLE:
                    self.assigned_memonheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 525:
                if ftype == TType.DOUBLE:
                    self.assigned_memoffheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 526:
                if ftype == TType.DOUBLE:
                    self.assigned_cpu = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 527:
                if ftype == TType.STRING:
                    self.owner = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot*.

        Uses the accelerated C encoder when available; otherwise each
        non-None field is written in field-id order.
        """
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('WorkerSummary')
        if self.supervisor_id is not None:
            oprot.writeFieldBegin('supervisor_id', TType.STRING, 1)
            oprot.writeString(self.supervisor_id.encode('utf-8') if sys.version_info[0] == 2 else self.supervisor_id)
            oprot.writeFieldEnd()
        if self.host is not None:
            oprot.writeFieldBegin('host', TType.STRING, 2)
            oprot.writeString(self.host.encode('utf-8') if sys.version_info[0] == 2 else self.host)
            oprot.writeFieldEnd()
        if self.port is not None:
            oprot.writeFieldBegin('port', TType.I32, 3)
            oprot.writeI32(self.port)
            oprot.writeFieldEnd()
        if self.topology_id is not None:
            oprot.writeFieldBegin('topology_id', TType.STRING, 4)
            oprot.writeString(self.topology_id.encode('utf-8') if sys.version_info[0] == 2 else self.topology_id)
            oprot.writeFieldEnd()
        if self.topology_name is not None:
            oprot.writeFieldBegin('topology_name', TType.STRING, 5)
            oprot.writeString(self.topology_name.encode('utf-8') if sys.version_info[0] == 2 else self.topology_name)
            oprot.writeFieldEnd()
        if self.num_executors is not None:
            oprot.writeFieldBegin('num_executors', TType.I32, 6)
            oprot.writeI32(self.num_executors)
            oprot.writeFieldEnd()
        if self.component_to_num_tasks is not None:
            oprot.writeFieldBegin('component_to_num_tasks', TType.MAP, 7)
            oprot.writeMapBegin(TType.STRING, TType.I64, len(self.component_to_num_tasks))
            for kiter415, viter416 in self.component_to_num_tasks.items():
                oprot.writeString(kiter415.encode('utf-8') if sys.version_info[0] == 2 else kiter415)
                oprot.writeI64(viter416)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.time_secs is not None:
            oprot.writeFieldBegin('time_secs', TType.I32, 8)
            oprot.writeI32(self.time_secs)
            oprot.writeFieldEnd()
        if self.uptime_secs is not None:
            oprot.writeFieldBegin('uptime_secs', TType.I32, 9)
            oprot.writeI32(self.uptime_secs)
            oprot.writeFieldEnd()
        if self.requested_memonheap is not None:
            oprot.writeFieldBegin('requested_memonheap', TType.DOUBLE, 521)
            oprot.writeDouble(self.requested_memonheap)
            oprot.writeFieldEnd()
        if self.requested_memoffheap is not None:
            oprot.writeFieldBegin('requested_memoffheap', TType.DOUBLE, 522)
            oprot.writeDouble(self.requested_memoffheap)
            oprot.writeFieldEnd()
        if self.requested_cpu is not None:
            oprot.writeFieldBegin('requested_cpu', TType.DOUBLE, 523)
            oprot.writeDouble(self.requested_cpu)
            oprot.writeFieldEnd()
        if self.assigned_memonheap is not None:
            oprot.writeFieldBegin('assigned_memonheap', TType.DOUBLE, 524)
            oprot.writeDouble(self.assigned_memonheap)
            oprot.writeFieldEnd()
        if self.assigned_memoffheap is not None:
            oprot.writeFieldBegin('assigned_memoffheap', TType.DOUBLE, 525)
            oprot.writeDouble(self.assigned_memoffheap)
            oprot.writeFieldEnd()
        if self.assigned_cpu is not None:
            oprot.writeFieldBegin('assigned_cpu', TType.DOUBLE, 526)
            oprot.writeDouble(self.assigned_cpu)
            oprot.writeFieldEnd()
        if self.owner is not None:
            oprot.writeFieldBegin('owner', TType.STRING, 527)
            oprot.writeString(self.owner.encode('utf-8') if sys.version_info[0] == 2 else self.owner)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """No required fields; nothing to check."""
        return
    def __repr__(self):
        """Debug string: ClassName(field1=value1, ...)."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        """Structural equality: same class and identical attribute dict."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Logical inverse of __eq__ (required for Python 2 semantics)."""
        return not (self == other)
class SupervisorPageInfo(object):
    """Thrift struct backing the supervisor UI page: lists of supervisor
    and worker summaries.

    Attributes:
     - supervisor_summaries
     - worker_summaries
    """

    def __init__(self, supervisor_summaries=None, worker_summaries=None,):
        self.supervisor_summaries = supervisor_summaries
        self.worker_summaries = worker_summaries

    def read(self, iprot):
        """Populate this struct from *iprot*, preferring the native decoder."""
        if (iprot._fast_decode is not None
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None):
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return

        def read_struct_list(element_type):
            # Shared reader for the list<struct> fields of this struct.
            elements = []
            (_etype, _size) = iprot.readListBegin()
            for _ in range(_size):
                element = element_type()
                element.read(iprot)
                elements.append(element)
            iprot.readListEnd()
            return elements

        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.LIST:
                self.supervisor_summaries = read_struct_list(SupervisorSummary)
            elif fid == 2 and ftype == TType.LIST:
                self.worker_summaries = read_struct_list(WorkerSummary)
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, skipping unset (None) fields."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return

        def write_struct_list(field_name, field_id, elements):
            # Shared emitter for the list<struct> fields of this struct.
            oprot.writeFieldBegin(field_name, TType.LIST, field_id)
            oprot.writeListBegin(TType.STRUCT, len(elements))
            for element in elements:
                element.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()

        oprot.writeStructBegin('SupervisorPageInfo')
        if self.supervisor_summaries is not None:
            write_struct_list('supervisor_summaries', 1, self.supervisor_summaries)
        if self.worker_summaries is not None:
            write_struct_list('worker_summaries', 2, self.worker_summaries)
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields; nothing to check."""
        return None

    def __repr__(self):
        attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class TopologyPageInfo(object):
"""
Attributes:
- id
- name
- uptime_secs
- status
- num_tasks
- num_workers
- num_executors
- topology_conf
- id_to_spout_agg_stats
- id_to_bolt_agg_stats
- sched_status
- topology_stats
- owner
- debug_options
- replication_count
- workers
- storm_version
- topology_version
- requested_memonheap
- requested_memoffheap
- requested_cpu
- assigned_memonheap
- assigned_memoffheap
- assigned_cpu
- requested_regular_on_heap_memory
- requested_shared_on_heap_memory
- requested_regular_off_heap_memory
- requested_shared_off_heap_memory
- assigned_regular_on_heap_memory
- assigned_shared_on_heap_memory
- assigned_regular_off_heap_memory
- assigned_shared_off_heap_memory
"""
    def __init__(self, id=None, name=None, uptime_secs=None, status=None, num_tasks=None, num_workers=None, num_executors=None, topology_conf=None, id_to_spout_agg_stats=None, id_to_bolt_agg_stats=None, sched_status=None, topology_stats=None, owner=None, debug_options=None, replication_count=None, workers=None, storm_version=None, topology_version=None, requested_memonheap=None, requested_memoffheap=None, requested_cpu=None, assigned_memonheap=None, assigned_memoffheap=None, assigned_cpu=None, requested_regular_on_heap_memory=None, requested_shared_on_heap_memory=None, requested_regular_off_heap_memory=None, requested_shared_off_heap_memory=None, assigned_regular_on_heap_memory=None, assigned_shared_on_heap_memory=None, assigned_regular_off_heap_memory=None, assigned_shared_off_heap_memory=None,):
        """Store all page fields; every field is optional and defaults to None.

        The assignment order below fixes the attribute insertion order and
        therefore the order fields appear in __repr__.
        """
        self.id = id
        self.name = name
        self.uptime_secs = uptime_secs
        self.status = status
        self.num_tasks = num_tasks
        self.num_workers = num_workers
        self.num_executors = num_executors
        self.topology_conf = topology_conf
        self.id_to_spout_agg_stats = id_to_spout_agg_stats
        self.id_to_bolt_agg_stats = id_to_bolt_agg_stats
        self.sched_status = sched_status
        self.topology_stats = topology_stats
        self.owner = owner
        self.debug_options = debug_options
        self.replication_count = replication_count
        self.workers = workers
        self.storm_version = storm_version
        self.topology_version = topology_version
        self.requested_memonheap = requested_memonheap
        self.requested_memoffheap = requested_memoffheap
        self.requested_cpu = requested_cpu
        self.assigned_memonheap = assigned_memonheap
        self.assigned_memoffheap = assigned_memoffheap
        self.assigned_cpu = assigned_cpu
        self.requested_regular_on_heap_memory = requested_regular_on_heap_memory
        self.requested_shared_on_heap_memory = requested_shared_on_heap_memory
        self.requested_regular_off_heap_memory = requested_regular_off_heap_memory
        self.requested_shared_off_heap_memory = requested_shared_off_heap_memory
        self.assigned_regular_on_heap_memory = assigned_regular_on_heap_memory
        self.assigned_shared_on_heap_memory = assigned_shared_on_heap_memory
        self.assigned_regular_off_heap_memory = assigned_regular_off_heap_memory
        self.assigned_shared_off_heap_memory = assigned_shared_off_heap_memory
    def read(self, iprot):
        """Deserialize this TopologyPageInfo struct from *iprot*.

        Uses the accelerated C decoder when available; otherwise fields are
        matched by (id, wire type) and anything unrecognized is skipped.
        """
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.uptime_secs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.status = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.I32:
                    self.num_tasks = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.I32:
                    self.num_workers = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.I32:
                    self.num_executors = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                if ftype == TType.STRING:
                    self.topology_conf = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 9:
                # map<string, ComponentAggregateStats> keyed by spout id.
                if ftype == TType.MAP:
                    self.id_to_spout_agg_stats = {}
                    (_ktype432, _vtype433, _size431) = iprot.readMapBegin()
                    for _i435 in range(_size431):
                        _key436 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val437 = ComponentAggregateStats()
                        _val437.read(iprot)
                        self.id_to_spout_agg_stats[_key436] = _val437
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 10:
                # map<string, ComponentAggregateStats> keyed by bolt id.
                if ftype == TType.MAP:
                    self.id_to_bolt_agg_stats = {}
                    (_ktype439, _vtype440, _size438) = iprot.readMapBegin()
                    for _i442 in range(_size438):
                        _key443 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val444 = ComponentAggregateStats()
                        _val444.read(iprot)
                        self.id_to_bolt_agg_stats[_key443] = _val444
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 11:
                if ftype == TType.STRING:
                    self.sched_status = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 12:
                if ftype == TType.STRUCT:
                    self.topology_stats = TopologyStats()
                    self.topology_stats.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 13:
                if ftype == TType.STRING:
                    self.owner = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 14:
                if ftype == TType.STRUCT:
                    self.debug_options = DebugOptions()
                    self.debug_options.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 15:
                if ftype == TType.I32:
                    self.replication_count = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 16:
                # list<WorkerSummary>
                if ftype == TType.LIST:
                    self.workers = []
                    (_etype448, _size445) = iprot.readListBegin()
                    for _i449 in range(_size445):
                        _elem450 = WorkerSummary()
                        _elem450.read(iprot)
                        self.workers.append(_elem450)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 17:
                if ftype == TType.STRING:
                    self.storm_version = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 18:
                if ftype == TType.STRING:
                    self.topology_version = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            # NOTE(review): field ids jump from 18 to 521+ for the resource
            # fields; the ids are part of the wire format — never renumber.
            elif fid == 521:
                if ftype == TType.DOUBLE:
                    self.requested_memonheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 522:
                if ftype == TType.DOUBLE:
                    self.requested_memoffheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 523:
                if ftype == TType.DOUBLE:
                    self.requested_cpu = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 524:
                if ftype == TType.DOUBLE:
                    self.assigned_memonheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 525:
                if ftype == TType.DOUBLE:
                    self.assigned_memoffheap = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 526:
                if ftype == TType.DOUBLE:
                    self.assigned_cpu = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 527:
                if ftype == TType.DOUBLE:
                    self.requested_regular_on_heap_memory = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 528:
                if ftype == TType.DOUBLE:
                    self.requested_shared_on_heap_memory = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 529:
                if ftype == TType.DOUBLE:
                    self.requested_regular_off_heap_memory = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 530:
                if ftype == TType.DOUBLE:
                    self.requested_shared_off_heap_memory = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 531:
                if ftype == TType.DOUBLE:
                    self.assigned_regular_on_heap_memory = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 532:
                if ftype == TType.DOUBLE:
                    self.assigned_shared_on_heap_memory = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 533:
                if ftype == TType.DOUBLE:
                    self.assigned_regular_off_heap_memory = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 534:
                if ftype == TType.DOUBLE:
                    self.assigned_shared_off_heap_memory = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot* (a Thrift output protocol).

        Only fields that are not None are emitted.  Core topology fields use
        Thrift field ids 1-18; optional resource/memory metrics occupy the
        521-534 field-id range (matching the IDL numbering).
        """
        # Fast path: accelerated C encoder when the extension is compiled in.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('TopologyPageInfo')
        if self.id is not None:
            oprot.writeFieldBegin('id', TType.STRING, 1)
            # Python 2 needs explicit UTF-8 encoding of text; Python 3 strings pass through.
            oprot.writeString(self.id.encode('utf-8') if sys.version_info[0] == 2 else self.id)
            oprot.writeFieldEnd()
        if self.name is not None:
            oprot.writeFieldBegin('name', TType.STRING, 2)
            oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
            oprot.writeFieldEnd()
        if self.uptime_secs is not None:
            oprot.writeFieldBegin('uptime_secs', TType.I32, 3)
            oprot.writeI32(self.uptime_secs)
            oprot.writeFieldEnd()
        if self.status is not None:
            oprot.writeFieldBegin('status', TType.STRING, 4)
            oprot.writeString(self.status.encode('utf-8') if sys.version_info[0] == 2 else self.status)
            oprot.writeFieldEnd()
        if self.num_tasks is not None:
            oprot.writeFieldBegin('num_tasks', TType.I32, 5)
            oprot.writeI32(self.num_tasks)
            oprot.writeFieldEnd()
        if self.num_workers is not None:
            oprot.writeFieldBegin('num_workers', TType.I32, 6)
            oprot.writeI32(self.num_workers)
            oprot.writeFieldEnd()
        if self.num_executors is not None:
            oprot.writeFieldBegin('num_executors', TType.I32, 7)
            oprot.writeI32(self.num_executors)
            oprot.writeFieldEnd()
        if self.topology_conf is not None:
            oprot.writeFieldBegin('topology_conf', TType.STRING, 8)
            oprot.writeString(self.topology_conf.encode('utf-8') if sys.version_info[0] == 2 else self.topology_conf)
            oprot.writeFieldEnd()
        # map<string, ComponentAggregateStats>: per-spout aggregated stats.
        if self.id_to_spout_agg_stats is not None:
            oprot.writeFieldBegin('id_to_spout_agg_stats', TType.MAP, 9)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.id_to_spout_agg_stats))
            for kiter451, viter452 in self.id_to_spout_agg_stats.items():
                oprot.writeString(kiter451.encode('utf-8') if sys.version_info[0] == 2 else kiter451)
                viter452.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        # map<string, ComponentAggregateStats>: per-bolt aggregated stats.
        if self.id_to_bolt_agg_stats is not None:
            oprot.writeFieldBegin('id_to_bolt_agg_stats', TType.MAP, 10)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.id_to_bolt_agg_stats))
            for kiter453, viter454 in self.id_to_bolt_agg_stats.items():
                oprot.writeString(kiter453.encode('utf-8') if sys.version_info[0] == 2 else kiter453)
                viter454.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.sched_status is not None:
            oprot.writeFieldBegin('sched_status', TType.STRING, 11)
            oprot.writeString(self.sched_status.encode('utf-8') if sys.version_info[0] == 2 else self.sched_status)
            oprot.writeFieldEnd()
        if self.topology_stats is not None:
            oprot.writeFieldBegin('topology_stats', TType.STRUCT, 12)
            self.topology_stats.write(oprot)
            oprot.writeFieldEnd()
        if self.owner is not None:
            oprot.writeFieldBegin('owner', TType.STRING, 13)
            oprot.writeString(self.owner.encode('utf-8') if sys.version_info[0] == 2 else self.owner)
            oprot.writeFieldEnd()
        if self.debug_options is not None:
            oprot.writeFieldBegin('debug_options', TType.STRUCT, 14)
            self.debug_options.write(oprot)
            oprot.writeFieldEnd()
        if self.replication_count is not None:
            oprot.writeFieldBegin('replication_count', TType.I32, 15)
            oprot.writeI32(self.replication_count)
            oprot.writeFieldEnd()
        if self.workers is not None:
            oprot.writeFieldBegin('workers', TType.LIST, 16)
            oprot.writeListBegin(TType.STRUCT, len(self.workers))
            for iter455 in self.workers:
                iter455.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.storm_version is not None:
            oprot.writeFieldBegin('storm_version', TType.STRING, 17)
            oprot.writeString(self.storm_version.encode('utf-8') if sys.version_info[0] == 2 else self.storm_version)
            oprot.writeFieldEnd()
        if self.topology_version is not None:
            oprot.writeFieldBegin('topology_version', TType.STRING, 18)
            oprot.writeString(self.topology_version.encode('utf-8') if sys.version_info[0] == 2 else self.topology_version)
            oprot.writeFieldEnd()
        # Resource/memory metrics (optional doubles, field ids 521-534).
        if self.requested_memonheap is not None:
            oprot.writeFieldBegin('requested_memonheap', TType.DOUBLE, 521)
            oprot.writeDouble(self.requested_memonheap)
            oprot.writeFieldEnd()
        if self.requested_memoffheap is not None:
            oprot.writeFieldBegin('requested_memoffheap', TType.DOUBLE, 522)
            oprot.writeDouble(self.requested_memoffheap)
            oprot.writeFieldEnd()
        if self.requested_cpu is not None:
            oprot.writeFieldBegin('requested_cpu', TType.DOUBLE, 523)
            oprot.writeDouble(self.requested_cpu)
            oprot.writeFieldEnd()
        if self.assigned_memonheap is not None:
            oprot.writeFieldBegin('assigned_memonheap', TType.DOUBLE, 524)
            oprot.writeDouble(self.assigned_memonheap)
            oprot.writeFieldEnd()
        if self.assigned_memoffheap is not None:
            oprot.writeFieldBegin('assigned_memoffheap', TType.DOUBLE, 525)
            oprot.writeDouble(self.assigned_memoffheap)
            oprot.writeFieldEnd()
        if self.assigned_cpu is not None:
            oprot.writeFieldBegin('assigned_cpu', TType.DOUBLE, 526)
            oprot.writeDouble(self.assigned_cpu)
            oprot.writeFieldEnd()
        if self.requested_regular_on_heap_memory is not None:
            oprot.writeFieldBegin('requested_regular_on_heap_memory', TType.DOUBLE, 527)
            oprot.writeDouble(self.requested_regular_on_heap_memory)
            oprot.writeFieldEnd()
        if self.requested_shared_on_heap_memory is not None:
            oprot.writeFieldBegin('requested_shared_on_heap_memory', TType.DOUBLE, 528)
            oprot.writeDouble(self.requested_shared_on_heap_memory)
            oprot.writeFieldEnd()
        if self.requested_regular_off_heap_memory is not None:
            oprot.writeFieldBegin('requested_regular_off_heap_memory', TType.DOUBLE, 529)
            oprot.writeDouble(self.requested_regular_off_heap_memory)
            oprot.writeFieldEnd()
        if self.requested_shared_off_heap_memory is not None:
            oprot.writeFieldBegin('requested_shared_off_heap_memory', TType.DOUBLE, 530)
            oprot.writeDouble(self.requested_shared_off_heap_memory)
            oprot.writeFieldEnd()
        if self.assigned_regular_on_heap_memory is not None:
            oprot.writeFieldBegin('assigned_regular_on_heap_memory', TType.DOUBLE, 531)
            oprot.writeDouble(self.assigned_regular_on_heap_memory)
            oprot.writeFieldEnd()
        if self.assigned_shared_on_heap_memory is not None:
            oprot.writeFieldBegin('assigned_shared_on_heap_memory', TType.DOUBLE, 532)
            oprot.writeDouble(self.assigned_shared_on_heap_memory)
            oprot.writeFieldEnd()
        if self.assigned_regular_off_heap_memory is not None:
            oprot.writeFieldBegin('assigned_regular_off_heap_memory', TType.DOUBLE, 533)
            oprot.writeDouble(self.assigned_regular_off_heap_memory)
            oprot.writeFieldEnd()
        if self.assigned_shared_off_heap_memory is not None:
            oprot.writeFieldBegin('assigned_shared_off_heap_memory', TType.DOUBLE, 534)
            oprot.writeDouble(self.assigned_shared_off_heap_memory)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Raise TProtocolException if the required field ``id`` is unset."""
        if self.id is None:
            raise TProtocolException(message='Required field id is unset!')
        return
    def __repr__(self):
        # Render every instance attribute as key=value for debugging.
        L = ['%s=%r' % (key, value)
            for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        # Structural equality: same class and identical attribute dicts.
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        # Explicit inverse of __eq__ (required for Python 2 compatibility).
        return not (self == other)
class ExecutorAggregateStats(object):
    """Pairs one executor's summary with that executor's aggregated stats.

    NOTE(review): appears to be Thrift-compiler-generated code; prefer
    regenerating from the IDL over hand-editing.

    Attributes:
     - exec_summary: ExecutorSummary struct (field 1)
     - stats: ComponentAggregateStats struct (field 2)
    """
    def __init__(self, exec_summary=None, stats=None,):
        self.exec_summary = exec_summary
        self.stats = stats
    def read(self, iprot):
        """Populate this struct from *iprot* (a Thrift input protocol)."""
        # Fast path: accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.exec_summary = ExecutorSummary()
                    self.exec_summary.read(iprot)
                else:
                    # Wire type mismatch: skip the field.
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.stats = ComponentAggregateStats()
                    self.stats.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot* (a Thrift output protocol)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ExecutorAggregateStats')
        if self.exec_summary is not None:
            oprot.writeFieldBegin('exec_summary', TType.STRUCT, 1)
            self.exec_summary.write(oprot)
            oprot.writeFieldEnd()
        if self.stats is not None:
            oprot.writeFieldBegin('stats', TType.STRUCT, 2)
            self.stats.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """No required fields; always succeeds."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
            for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class ComponentPageInfo(object):
    """UI page data for a single topology component (spout or bolt).

    NOTE(review): appears to be Thrift-compiler-generated code; prefer
    regenerating from the IDL over hand-editing.

    Attributes:
     - component_id: required string id of the component (field 1)
     - component_type: required i32 enum value (field 2)
     - topology_id
     - topology_name
     - num_executors
     - num_tasks
     - window_to_stats: map<string, ComponentAggregateStats> keyed by time window
     - gsid_to_input_stats: map<GlobalStreamId, ComponentAggregateStats>
     - sid_to_output_stats: map<string, ComponentAggregateStats> keyed by stream id
     - exec_stats: list<ExecutorAggregateStats>
     - errors: list<ErrorInfo>
     - eventlog_host
     - eventlog_port
     - debug_options: DebugOptions struct
     - topology_status
     - resources_map: map<string, double>
    """
    def __init__(self, component_id=None, component_type=None, topology_id=None, topology_name=None, num_executors=None, num_tasks=None, window_to_stats=None, gsid_to_input_stats=None, sid_to_output_stats=None, exec_stats=None, errors=None, eventlog_host=None, eventlog_port=None, debug_options=None, topology_status=None, resources_map=None,):
        self.component_id = component_id
        self.component_type = component_type
        self.topology_id = topology_id
        self.topology_name = topology_name
        self.num_executors = num_executors
        self.num_tasks = num_tasks
        self.window_to_stats = window_to_stats
        self.gsid_to_input_stats = gsid_to_input_stats
        self.sid_to_output_stats = sid_to_output_stats
        self.exec_stats = exec_stats
        self.errors = errors
        self.eventlog_host = eventlog_host
        self.eventlog_port = eventlog_port
        self.debug_options = debug_options
        self.topology_status = topology_status
        self.resources_map = resources_map
    def read(self, iprot):
        """Populate this struct from *iprot* (a Thrift input protocol)."""
        # Fast path: accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 decodes bytes to unicode; Python 3 already returns str.
                    self.component_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.component_type = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.topology_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.topology_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.I32:
                    self.num_executors = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.I32:
                    self.num_tasks = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.MAP:
                    # map<string, ComponentAggregateStats>
                    self.window_to_stats = {}
                    (_ktype457, _vtype458, _size456) = iprot.readMapBegin()
                    for _i460 in range(_size456):
                        _key461 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val462 = ComponentAggregateStats()
                        _val462.read(iprot)
                        self.window_to_stats[_key461] = _val462
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                if ftype == TType.MAP:
                    # map<GlobalStreamId, ComponentAggregateStats> (struct-keyed map).
                    self.gsid_to_input_stats = {}
                    (_ktype464, _vtype465, _size463) = iprot.readMapBegin()
                    for _i467 in range(_size463):
                        _key468 = GlobalStreamId()
                        _key468.read(iprot)
                        _val469 = ComponentAggregateStats()
                        _val469.read(iprot)
                        self.gsid_to_input_stats[_key468] = _val469
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 9:
                if ftype == TType.MAP:
                    self.sid_to_output_stats = {}
                    (_ktype471, _vtype472, _size470) = iprot.readMapBegin()
                    for _i474 in range(_size470):
                        _key475 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val476 = ComponentAggregateStats()
                        _val476.read(iprot)
                        self.sid_to_output_stats[_key475] = _val476
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 10:
                if ftype == TType.LIST:
                    self.exec_stats = []
                    (_etype480, _size477) = iprot.readListBegin()
                    for _i481 in range(_size477):
                        _elem482 = ExecutorAggregateStats()
                        _elem482.read(iprot)
                        self.exec_stats.append(_elem482)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 11:
                if ftype == TType.LIST:
                    self.errors = []
                    (_etype486, _size483) = iprot.readListBegin()
                    for _i487 in range(_size483):
                        _elem488 = ErrorInfo()
                        _elem488.read(iprot)
                        self.errors.append(_elem488)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 12:
                if ftype == TType.STRING:
                    self.eventlog_host = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 13:
                if ftype == TType.I32:
                    self.eventlog_port = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 14:
                if ftype == TType.STRUCT:
                    self.debug_options = DebugOptions()
                    self.debug_options.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 15:
                if ftype == TType.STRING:
                    self.topology_status = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 16:
                if ftype == TType.MAP:
                    # map<string, double>
                    self.resources_map = {}
                    (_ktype490, _vtype491, _size489) = iprot.readMapBegin()
                    for _i493 in range(_size489):
                        _key494 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val495 = iprot.readDouble()
                        self.resources_map[_key494] = _val495
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer writers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot* (a Thrift output protocol)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ComponentPageInfo')
        if self.component_id is not None:
            oprot.writeFieldBegin('component_id', TType.STRING, 1)
            oprot.writeString(self.component_id.encode('utf-8') if sys.version_info[0] == 2 else self.component_id)
            oprot.writeFieldEnd()
        if self.component_type is not None:
            oprot.writeFieldBegin('component_type', TType.I32, 2)
            oprot.writeI32(self.component_type)
            oprot.writeFieldEnd()
        if self.topology_id is not None:
            oprot.writeFieldBegin('topology_id', TType.STRING, 3)
            oprot.writeString(self.topology_id.encode('utf-8') if sys.version_info[0] == 2 else self.topology_id)
            oprot.writeFieldEnd()
        if self.topology_name is not None:
            oprot.writeFieldBegin('topology_name', TType.STRING, 4)
            oprot.writeString(self.topology_name.encode('utf-8') if sys.version_info[0] == 2 else self.topology_name)
            oprot.writeFieldEnd()
        if self.num_executors is not None:
            oprot.writeFieldBegin('num_executors', TType.I32, 5)
            oprot.writeI32(self.num_executors)
            oprot.writeFieldEnd()
        if self.num_tasks is not None:
            oprot.writeFieldBegin('num_tasks', TType.I32, 6)
            oprot.writeI32(self.num_tasks)
            oprot.writeFieldEnd()
        if self.window_to_stats is not None:
            oprot.writeFieldBegin('window_to_stats', TType.MAP, 7)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.window_to_stats))
            for kiter496, viter497 in self.window_to_stats.items():
                oprot.writeString(kiter496.encode('utf-8') if sys.version_info[0] == 2 else kiter496)
                viter497.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.gsid_to_input_stats is not None:
            oprot.writeFieldBegin('gsid_to_input_stats', TType.MAP, 8)
            oprot.writeMapBegin(TType.STRUCT, TType.STRUCT, len(self.gsid_to_input_stats))
            for kiter498, viter499 in self.gsid_to_input_stats.items():
                kiter498.write(oprot)
                viter499.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.sid_to_output_stats is not None:
            oprot.writeFieldBegin('sid_to_output_stats', TType.MAP, 9)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.sid_to_output_stats))
            for kiter500, viter501 in self.sid_to_output_stats.items():
                oprot.writeString(kiter500.encode('utf-8') if sys.version_info[0] == 2 else kiter500)
                viter501.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.exec_stats is not None:
            oprot.writeFieldBegin('exec_stats', TType.LIST, 10)
            oprot.writeListBegin(TType.STRUCT, len(self.exec_stats))
            for iter502 in self.exec_stats:
                iter502.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.errors is not None:
            oprot.writeFieldBegin('errors', TType.LIST, 11)
            oprot.writeListBegin(TType.STRUCT, len(self.errors))
            for iter503 in self.errors:
                iter503.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.eventlog_host is not None:
            oprot.writeFieldBegin('eventlog_host', TType.STRING, 12)
            oprot.writeString(self.eventlog_host.encode('utf-8') if sys.version_info[0] == 2 else self.eventlog_host)
            oprot.writeFieldEnd()
        if self.eventlog_port is not None:
            oprot.writeFieldBegin('eventlog_port', TType.I32, 13)
            oprot.writeI32(self.eventlog_port)
            oprot.writeFieldEnd()
        if self.debug_options is not None:
            oprot.writeFieldBegin('debug_options', TType.STRUCT, 14)
            self.debug_options.write(oprot)
            oprot.writeFieldEnd()
        if self.topology_status is not None:
            oprot.writeFieldBegin('topology_status', TType.STRING, 15)
            oprot.writeString(self.topology_status.encode('utf-8') if sys.version_info[0] == 2 else self.topology_status)
            oprot.writeFieldEnd()
        if self.resources_map is not None:
            oprot.writeFieldBegin('resources_map', TType.MAP, 16)
            oprot.writeMapBegin(TType.STRING, TType.DOUBLE, len(self.resources_map))
            for kiter504, viter505 in self.resources_map.items():
                oprot.writeString(kiter504.encode('utf-8') if sys.version_info[0] == 2 else kiter504)
                oprot.writeDouble(viter505)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.component_id is None:
            raise TProtocolException(message='Required field component_id is unset!')
        if self.component_type is None:
            raise TProtocolException(message='Required field component_type is unset!')
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
            for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class KillOptions(object):
    """Options controlling how a topology is killed.

    Attributes:
     - wait_secs
    """
    def __init__(self, wait_secs=None,):
        self.wait_secs = wait_secs
    def read(self, iprot):
        """Populate this struct from *iprot* (a Thrift input protocol)."""
        # Accelerated C decoder handles the whole struct when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            fname, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.I32:
                self.wait_secs = iprot.readI32()
            else:
                # Unknown field id or unexpected wire type: skip it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot* (a Thrift output protocol)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('KillOptions')
        if self.wait_secs is not None:
            oprot.writeFieldBegin('wait_secs', TType.I32, 1)
            oprot.writeI32(self.wait_secs)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """No required fields; always succeeds."""
        return
    def __repr__(self):
        rendered = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, rendered)
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not self.__eq__(other)
class RebalanceOptions(object):
    """Options controlling a topology rebalance.

    NOTE(review): appears to be Thrift-compiler-generated code; prefer
    regenerating from the IDL over hand-editing.

    Attributes:
     - wait_secs
     - num_workers
     - num_executors: map<string, i32> of component id -> executor count
     - topology_resources_overrides: map<string, map<string, double>>
     - topology_conf_overrides: JSON-ish string of conf overrides (string on the wire)
     - principal
    """
    def __init__(self, wait_secs=None, num_workers=None, num_executors=None, topology_resources_overrides=None, topology_conf_overrides=None, principal=None,):
        self.wait_secs = wait_secs
        self.num_workers = num_workers
        self.num_executors = num_executors
        self.topology_resources_overrides = topology_resources_overrides
        self.topology_conf_overrides = topology_conf_overrides
        self.principal = principal
    def read(self, iprot):
        """Populate this struct from *iprot* (a Thrift input protocol)."""
        # Fast path: accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.wait_secs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.num_workers = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.MAP:
                    # map<string, i32>
                    self.num_executors = {}
                    (_ktype507, _vtype508, _size506) = iprot.readMapBegin()
                    for _i510 in range(_size506):
                        _key511 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val512 = iprot.readI32()
                        self.num_executors[_key511] = _val512
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.MAP:
                    # Nested map: outer key is a string, inner is map<string, double>.
                    self.topology_resources_overrides = {}
                    (_ktype514, _vtype515, _size513) = iprot.readMapBegin()
                    for _i517 in range(_size513):
                        _key518 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val519 = {}
                        (_ktype521, _vtype522, _size520) = iprot.readMapBegin()
                        for _i524 in range(_size520):
                            _key525 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                            _val526 = iprot.readDouble()
                            _val519[_key525] = _val526
                        iprot.readMapEnd()
                        self.topology_resources_overrides[_key518] = _val519
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.topology_conf_overrides = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.STRING:
                    self.principal = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot* (a Thrift output protocol)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('RebalanceOptions')
        if self.wait_secs is not None:
            oprot.writeFieldBegin('wait_secs', TType.I32, 1)
            oprot.writeI32(self.wait_secs)
            oprot.writeFieldEnd()
        if self.num_workers is not None:
            oprot.writeFieldBegin('num_workers', TType.I32, 2)
            oprot.writeI32(self.num_workers)
            oprot.writeFieldEnd()
        if self.num_executors is not None:
            oprot.writeFieldBegin('num_executors', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.I32, len(self.num_executors))
            for kiter527, viter528 in self.num_executors.items():
                oprot.writeString(kiter527.encode('utf-8') if sys.version_info[0] == 2 else kiter527)
                oprot.writeI32(viter528)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.topology_resources_overrides is not None:
            oprot.writeFieldBegin('topology_resources_overrides', TType.MAP, 4)
            oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.topology_resources_overrides))
            for kiter529, viter530 in self.topology_resources_overrides.items():
                oprot.writeString(kiter529.encode('utf-8') if sys.version_info[0] == 2 else kiter529)
                oprot.writeMapBegin(TType.STRING, TType.DOUBLE, len(viter530))
                for kiter531, viter532 in viter530.items():
                    oprot.writeString(kiter531.encode('utf-8') if sys.version_info[0] == 2 else kiter531)
                    oprot.writeDouble(viter532)
                oprot.writeMapEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.topology_conf_overrides is not None:
            oprot.writeFieldBegin('topology_conf_overrides', TType.STRING, 5)
            oprot.writeString(self.topology_conf_overrides.encode('utf-8') if sys.version_info[0] == 2 else self.topology_conf_overrides)
            oprot.writeFieldEnd()
        if self.principal is not None:
            oprot.writeFieldBegin('principal', TType.STRING, 6)
            oprot.writeString(self.principal.encode('utf-8') if sys.version_info[0] == 2 else self.principal)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """No required fields; always succeeds."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
            for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class Credentials(object):
    """Credential map handed to a topology, plus its owner.

    NOTE(review): appears to be Thrift-compiler-generated code; prefer
    regenerating from the IDL over hand-editing.

    Attributes:
     - creds: required map<string, string> of credential name -> value (field 1)
     - topoOwner: optional owner principal string (field 2)
    """
    def __init__(self, creds=None, topoOwner=None,):
        self.creds = creds
        self.topoOwner = topoOwner
    def read(self, iprot):
        """Populate this struct from *iprot* (a Thrift input protocol)."""
        # Fast path: accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.MAP:
                    self.creds = {}
                    (_ktype534, _vtype535, _size533) = iprot.readMapBegin()
                    for _i537 in range(_size533):
                        # Python 2 decodes bytes to unicode; Python 3 already returns str.
                        _key538 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val539 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.creds[_key538] = _val539
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.topoOwner = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot* (a Thrift output protocol)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('Credentials')
        if self.creds is not None:
            oprot.writeFieldBegin('creds', TType.MAP, 1)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.creds))
            for kiter540, viter541 in self.creds.items():
                oprot.writeString(kiter540.encode('utf-8') if sys.version_info[0] == 2 else kiter540)
                oprot.writeString(viter541.encode('utf-8') if sys.version_info[0] == 2 else viter541)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.topoOwner is not None:
            oprot.writeFieldBegin('topoOwner', TType.STRING, 2)
            oprot.writeString(self.topoOwner.encode('utf-8') if sys.version_info[0] == 2 else self.topoOwner)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Raise TProtocolException if the required field ``creds`` is unset."""
        if self.creds is None:
            raise TProtocolException(message='Required field creds is unset!')
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
            for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class SubmitOptions(object):
    """Options supplied when submitting a topology.

    Attributes:
     - initial_status
     - creds
    """
    def __init__(self, initial_status=None, creds=None,):
        self.initial_status = initial_status
        self.creds = creds
    def read(self, iprot):
        """Populate this struct from *iprot* (a Thrift input protocol)."""
        # Accelerated C decoder handles the whole struct when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            fname, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.I32:
                self.initial_status = iprot.readI32()
            elif fid == 2 and ftype == TType.STRUCT:
                self.creds = Credentials()
                self.creds.read(iprot)
            else:
                # Unknown field id or unexpected wire type: skip it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot* (a Thrift output protocol)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SubmitOptions')
        if self.initial_status is not None:
            oprot.writeFieldBegin('initial_status', TType.I32, 1)
            oprot.writeI32(self.initial_status)
            oprot.writeFieldEnd()
        if self.creds is not None:
            oprot.writeFieldBegin('creds', TType.STRUCT, 2)
            self.creds.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Raise TProtocolException when the required initial_status is unset."""
        if self.initial_status is None:
            raise TProtocolException(message='Required field initial_status is unset!')
        return
    def __repr__(self):
        rendered = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, rendered)
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not self.__eq__(other)
class AccessControl(object):
    """One entry in a blob ACL: a subject and its access bits.

    Attributes:
     - type
     - name
     - access
    """
    def __init__(self, type=None, name=None, access=None,):
        self.type = type
        self.name = name
        self.access = access
    def read(self, iprot):
        """Populate this struct from *iprot* (a Thrift input protocol)."""
        # Accelerated C decoder handles the whole struct when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            fname, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.I32:
                self.type = iprot.readI32()
            elif fid == 2 and ftype == TType.STRING:
                # Python 2 decodes bytes to unicode; Python 3 already returns str.
                self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            elif fid == 3 and ftype == TType.I32:
                self.access = iprot.readI32()
            else:
                # Unknown field id or unexpected wire type: skip it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot* (a Thrift output protocol)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('AccessControl')
        if self.type is not None:
            oprot.writeFieldBegin('type', TType.I32, 1)
            oprot.writeI32(self.type)
            oprot.writeFieldEnd()
        if self.name is not None:
            oprot.writeFieldBegin('name', TType.STRING, 2)
            oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
            oprot.writeFieldEnd()
        if self.access is not None:
            oprot.writeFieldBegin('access', TType.I32, 3)
            oprot.writeI32(self.access)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Raise TProtocolException if a required field (type, access) is unset."""
        if self.type is None:
            raise TProtocolException(message='Required field type is unset!')
        if self.access is None:
            raise TProtocolException(message='Required field access is unset!')
        return
    def __repr__(self):
        rendered = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, rendered)
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not self.__eq__(other)
class SettableBlobMeta(object):
    """User-settable blob metadata: ACL entries and replication factor.

    NOTE(review): appears to be Thrift-compiler-generated code; prefer
    regenerating from the IDL over hand-editing.

    Attributes:
     - acl: required list<AccessControl> (field 1)
     - replication_factor: optional i32 (field 2)
    """
    def __init__(self, acl=None, replication_factor=None,):
        self.acl = acl
        self.replication_factor = replication_factor
    def read(self, iprot):
        """Populate this struct from *iprot* (a Thrift input protocol)."""
        # Fast path: accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.LIST:
                    self.acl = []
                    (_etype545, _size542) = iprot.readListBegin()
                    for _i546 in range(_size542):
                        _elem547 = AccessControl()
                        _elem547.read(iprot)
                        self.acl.append(_elem547)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.replication_factor = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to *oprot* (a Thrift output protocol)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SettableBlobMeta')
        if self.acl is not None:
            oprot.writeFieldBegin('acl', TType.LIST, 1)
            oprot.writeListBegin(TType.STRUCT, len(self.acl))
            for iter548 in self.acl:
                iter548.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.replication_factor is not None:
            oprot.writeFieldBegin('replication_factor', TType.I32, 2)
            oprot.writeI32(self.replication_factor)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Raise TProtocolException if the required field ``acl`` is unset."""
        if self.acl is None:
            raise TProtocolException(message='Required field acl is unset!')
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
            for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class ReadableBlobMeta(object):
    """
    Read-side blob metadata.

    Attributes:
     - settable: nested SettableBlobMeta (required)
     - version: i64 blob version (required)
    """

    def __init__(self, settable=None, version=None,):
        self.settable = settable
        self.version = version

    def read(self, iprot):
        """Deserialize this struct from iprot (C-accelerated path when available)."""
        fast = (iprot._fast_decode is not None
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None)
        if fast:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.settable = SettableBlobMeta()
                self.settable.read(iprot)
            elif fid == 2 and ftype == TType.I64:
                self.version = iprot.readI64()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot (C-accelerated path when available)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ReadableBlobMeta')
        if self.settable is not None:
            oprot.writeFieldBegin('settable', TType.STRUCT, 1)
            self.settable.write(oprot)
            oprot.writeFieldEnd()
        if self.version is not None:
            oprot.writeFieldBegin('version', TType.I64, 2)
            oprot.writeI64(self.version)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Both settable and version are required."""
        for required in ('settable', 'version'):
            if getattr(self, required) is None:
                raise TProtocolException(message='Required field %s is unset!' % required)
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ListBlobsResult(object):
    """
    One page of a blob listing.

    Attributes:
     - keys: list of blob key strings (required)
     - session: pagination session token string (required)
    """

    def __init__(self, keys=None, session=None,):
        self.keys = keys
        self.session = session

    def read(self, iprot):
        """Deserialize this struct from iprot (C-accelerated path when available)."""
        fast = (iprot._fast_decode is not None
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None)
        if fast:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.LIST:
                self.keys = []
                (_etype, _count) = iprot.readListBegin()
                for _ in range(_count):
                    raw = iprot.readString()
                    # Python 2 returns bytes; normalize to unicode there.
                    if sys.version_info[0] == 2:
                        raw = raw.decode('utf-8')
                    self.keys.append(raw)
                iprot.readListEnd()
            elif fid == 2 and ftype == TType.STRING:
                raw = iprot.readString()
                self.session = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot (C-accelerated path when available)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ListBlobsResult')
        if self.keys is not None:
            oprot.writeFieldBegin('keys', TType.LIST, 1)
            oprot.writeListBegin(TType.STRING, len(self.keys))
            for key in self.keys:
                oprot.writeString(key.encode('utf-8') if sys.version_info[0] == 2 else key)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 2)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Both keys and session are required."""
        for required in ('keys', 'session'):
            if getattr(self, required) is None:
                raise TProtocolException(message='Required field %s is unset!' % required)
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class BeginDownloadResult(object):
    """
    Handshake result for a blob download.

    Attributes:
     - version: i64 blob version (required)
     - session: download session token string (required)
     - data_size: i64 total payload size (optional)
    """

    def __init__(self, version=None, session=None, data_size=None,):
        self.version = version
        self.session = session
        self.data_size = data_size

    def read(self, iprot):
        """Deserialize this struct from iprot (C-accelerated path when available)."""
        fast = (iprot._fast_decode is not None
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None)
        if fast:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.I64:
                self.version = iprot.readI64()
            elif fid == 2 and ftype == TType.STRING:
                raw = iprot.readString()
                self.session = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif fid == 3 and ftype == TType.I64:
                self.data_size = iprot.readI64()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot (C-accelerated path when available)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('BeginDownloadResult')
        if self.version is not None:
            oprot.writeFieldBegin('version', TType.I64, 1)
            oprot.writeI64(self.version)
            oprot.writeFieldEnd()
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 2)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.data_size is not None:
            oprot.writeFieldBegin('data_size', TType.I64, 3)
            oprot.writeI64(self.data_size)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """version and session are required; data_size is optional."""
        for required in ('version', 'session'):
            if getattr(self, required) is None:
                raise TProtocolException(message='Required field %s is unset!' % required)
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class SupervisorInfo(object):
    """
    Generated Thrift struct carrying a supervisor's reported state.

    Field types below are derived from the protocol calls in read()/write();
    validate() marks time_secs and hostname as required.

    Attributes:
     - time_secs (i64, required)
     - hostname (string, required)
     - assignment_id (string)
     - used_ports (list<i64>)
     - meta (list<i64>)
     - scheduler_meta (map<string, string>)
     - uptime_secs (i64)
     - version (string)
     - resources_map (map<string, double>)
     - server_port (i32)
    """
    def __init__(self, time_secs=None, hostname=None, assignment_id=None, used_ports=None, meta=None, scheduler_meta=None, uptime_secs=None, version=None, resources_map=None, server_port=None,):
        self.time_secs = time_secs
        self.hostname = hostname
        self.assignment_id = assignment_id
        self.used_ports = used_ports
        self.meta = meta
        self.scheduler_meta = scheduler_meta
        self.uptime_secs = uptime_secs
        self.version = version
        self.resources_map = resources_map
        self.server_port = server_port
    def read(self, iprot):
        """Deserialize this struct from iprot, field by field, until STOP."""
        # Fast path: C-accelerated decode when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # Dispatch on field id; a type mismatch falls through to skip().
            if fid == 1:
                if ftype == TType.I64:
                    self.time_secs = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    # Python 2 strings arrive as bytes and are decoded to unicode.
                    self.hostname = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.assignment_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.LIST:
                    # list<i64> of ports in use.
                    self.used_ports = []
                    (_etype559, _size542) = iprot.readListBegin() if False else iprot.readListBegin()
                    for _i560 in range(_size542):
                        _elem561 = iprot.readI64()
                        self.used_ports.append(_elem561)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.LIST:
                    # list<i64> metadata values.
                    self.meta = []
                    (_etype565, _size562) = iprot.readListBegin()
                    for _i566 in range(_size562):
                        _elem567 = iprot.readI64()
                        self.meta.append(_elem567)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.MAP:
                    # map<string, string>.
                    self.scheduler_meta = {}
                    (_ktype569, _vtype570, _size568) = iprot.readMapBegin()
                    for _i572 in range(_size568):
                        _key573 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val574 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.scheduler_meta[_key573] = _val574
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.I64:
                    self.uptime_secs = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                if ftype == TType.STRING:
                    self.version = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 9:
                if ftype == TType.MAP:
                    # map<string, double>.
                    self.resources_map = {}
                    (_ktype576, _vtype577, _size575) = iprot.readMapBegin()
                    for _i579 in range(_size575):
                        _key580 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val581 = iprot.readDouble()
                        self.resources_map[_key580] = _val581
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 10:
                if ftype == TType.I32:
                    self.server_port = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; fields set to None are omitted."""
        # Fast path: C-accelerated encode when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SupervisorInfo')
        if self.time_secs is not None:
            oprot.writeFieldBegin('time_secs', TType.I64, 1)
            oprot.writeI64(self.time_secs)
            oprot.writeFieldEnd()
        if self.hostname is not None:
            oprot.writeFieldBegin('hostname', TType.STRING, 2)
            oprot.writeString(self.hostname.encode('utf-8') if sys.version_info[0] == 2 else self.hostname)
            oprot.writeFieldEnd()
        if self.assignment_id is not None:
            oprot.writeFieldBegin('assignment_id', TType.STRING, 3)
            oprot.writeString(self.assignment_id.encode('utf-8') if sys.version_info[0] == 2 else self.assignment_id)
            oprot.writeFieldEnd()
        if self.used_ports is not None:
            oprot.writeFieldBegin('used_ports', TType.LIST, 4)
            oprot.writeListBegin(TType.I64, len(self.used_ports))
            for iter582 in self.used_ports:
                oprot.writeI64(iter582)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.meta is not None:
            oprot.writeFieldBegin('meta', TType.LIST, 5)
            oprot.writeListBegin(TType.I64, len(self.meta))
            for iter583 in self.meta:
                oprot.writeI64(iter583)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.scheduler_meta is not None:
            oprot.writeFieldBegin('scheduler_meta', TType.MAP, 6)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.scheduler_meta))
            for kiter584, viter585 in self.scheduler_meta.items():
                oprot.writeString(kiter584.encode('utf-8') if sys.version_info[0] == 2 else kiter584)
                oprot.writeString(viter585.encode('utf-8') if sys.version_info[0] == 2 else viter585)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.uptime_secs is not None:
            oprot.writeFieldBegin('uptime_secs', TType.I64, 7)
            oprot.writeI64(self.uptime_secs)
            oprot.writeFieldEnd()
        if self.version is not None:
            oprot.writeFieldBegin('version', TType.STRING, 8)
            oprot.writeString(self.version.encode('utf-8') if sys.version_info[0] == 2 else self.version)
            oprot.writeFieldEnd()
        if self.resources_map is not None:
            oprot.writeFieldBegin('resources_map', TType.MAP, 9)
            oprot.writeMapBegin(TType.STRING, TType.DOUBLE, len(self.resources_map))
            for kiter586, viter587 in self.resources_map.items():
                oprot.writeString(kiter586.encode('utf-8') if sys.version_info[0] == 2 else kiter586)
                oprot.writeDouble(viter587)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.server_port is not None:
            oprot.writeFieldBegin('server_port', TType.I32, 10)
            oprot.writeI32(self.server_port)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Raise TProtocolException if a required field is unset."""
        if self.time_secs is None:
            raise TProtocolException(message='Required field time_secs is unset!')
        if self.hostname is None:
            raise TProtocolException(message='Required field hostname is unset!')
        return
    def __repr__(self):
        # Debug representation built from the instance dict.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class NodeInfo(object):
    """
    A node and the set of ports on it.

    Attributes:
     - node: node identifier string (required)
     - port: set of i64 port numbers (required)
    """

    def __init__(self, node=None, port=None,):
        self.node = node
        self.port = port

    def read(self, iprot):
        """Deserialize this struct from iprot (C-accelerated path when available)."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.node = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.SET:
                    self.port = set()
                    (_etype591, _size588) = iprot.readSetBegin()
                    for _i592 in range(_size588):
                        _elem593 = iprot.readI64()
                        self.port.add(_elem593)
                    iprot.readSetEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot (C-accelerated path when available)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('NodeInfo')
        if self.node is not None:
            oprot.writeFieldBegin('node', TType.STRING, 1)
            oprot.writeString(self.node.encode('utf-8') if sys.version_info[0] == 2 else self.node)
            oprot.writeFieldEnd()
        if self.port is not None:
            oprot.writeFieldBegin('port', TType.SET, 2)
            oprot.writeSetBegin(TType.I64, len(self.port))
            for iter594 in self.port:
                oprot.writeI64(iter594)
            oprot.writeSetEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Both node and port are required."""
        if self.node is None:
            raise TProtocolException(message='Required field node is unset!')
        if self.port is None:
            raise TProtocolException(message='Required field port is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        # BUGFIX: defining __eq__ without __hash__ makes the class unhashable
        # in Python 3, yet NodeInfo instances are used as dict keys elsewhere
        # in this file (e.g. Assignment.worker_resources). Hash by value,
        # consistent with __eq__ (equal attribute dicts => equal hash inputs).
        # self.port is a set, so freeze it before hashing.
        port = frozenset(self.port) if self.port is not None else None
        return hash((self.node, port))
class WorkerResources(object):
    """
    Resource totals for one worker.

    Attributes:
     - mem_on_heap, mem_off_heap, cpu: double totals
     - shared_mem_on_heap, shared_mem_off_heap: double shared-memory totals
     - resources, shared_resources: map<string, double> of named resources
    """

    def __init__(self, mem_on_heap=None, mem_off_heap=None, cpu=None, shared_mem_on_heap=None, shared_mem_off_heap=None, resources=None, shared_resources=None,):
        self.mem_on_heap = mem_on_heap
        self.mem_off_heap = mem_off_heap
        self.cpu = cpu
        self.shared_mem_on_heap = shared_mem_on_heap
        self.shared_mem_off_heap = shared_mem_off_heap
        self.resources = resources
        self.shared_resources = shared_resources

    def _read_string_double_map(self, iprot):
        """Decode one map<string, double> field body from iprot."""
        out = {}
        (_ktype, _vtype, _count) = iprot.readMapBegin()
        for _ in range(_count):
            k = iprot.readString()
            if sys.version_info[0] == 2:
                k = k.decode('utf-8')
            out[k] = iprot.readDouble()
        iprot.readMapEnd()
        return out

    def read(self, iprot):
        """Deserialize this struct from iprot (C-accelerated path when available)."""
        fast = (iprot._fast_decode is not None
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None)
        if fast:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.DOUBLE:
                self.mem_on_heap = iprot.readDouble()
            elif fid == 2 and ftype == TType.DOUBLE:
                self.mem_off_heap = iprot.readDouble()
            elif fid == 3 and ftype == TType.DOUBLE:
                self.cpu = iprot.readDouble()
            elif fid == 4 and ftype == TType.DOUBLE:
                self.shared_mem_on_heap = iprot.readDouble()
            elif fid == 5 and ftype == TType.DOUBLE:
                self.shared_mem_off_heap = iprot.readDouble()
            elif fid == 6 and ftype == TType.MAP:
                self.resources = self._read_string_double_map(iprot)
            elif fid == 7 and ftype == TType.MAP:
                self.shared_resources = self._read_string_double_map(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot (C-accelerated path when available)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('WorkerResources')
        # Scalar double fields, in field-id order.
        for fname, fid in (('mem_on_heap', 1), ('mem_off_heap', 2), ('cpu', 3),
                           ('shared_mem_on_heap', 4), ('shared_mem_off_heap', 5)):
            value = getattr(self, fname)
            if value is not None:
                oprot.writeFieldBegin(fname, TType.DOUBLE, fid)
                oprot.writeDouble(value)
                oprot.writeFieldEnd()
        # map<string, double> fields.
        for fname, fid in (('resources', 6), ('shared_resources', 7)):
            mapping = getattr(self, fname)
            if mapping is not None:
                oprot.writeFieldBegin(fname, TType.MAP, fid)
                oprot.writeMapBegin(TType.STRING, TType.DOUBLE, len(mapping))
                for k, v in mapping.items():
                    oprot.writeString(k.encode('utf-8') if sys.version_info[0] == 2 else k)
                    oprot.writeDouble(v)
                oprot.writeMapEnd()
                oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields."""
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class Assignment(object):
    """
    Assignment of executors and workers to nodes/ports.

    Attributes:
     - master_code_dir: (string, required)
     - node_host: map<string, string>
     - executor_node_port: map keyed by an executor id range (a list<i64> on
       the wire, stored as a tuple so the key is hashable) -> NodeInfo
     - executor_start_time_secs: same key shape -> i64 start time
     - worker_resources: map<NodeInfo, WorkerResources>
     - total_shared_off_heap: map<string, double>
     - owner: (string, optional)
    """

    def __init__(self, master_code_dir=None, node_host={}, executor_node_port={}, executor_start_time_secs={}, worker_resources={}, total_shared_off_heap={}, owner=None,):
        # NOTE: the mutable {} defaults follow the generated-code contract:
        # when the caller passes the exact default object recorded in
        # thrift_spec[n][4], a fresh dict is substituted so instances never
        # share state. The identity checks below must not be "simplified".
        self.master_code_dir = master_code_dir
        if node_host is self.thrift_spec[2][4]:
            node_host = {}
        self.node_host = node_host
        if executor_node_port is self.thrift_spec[3][4]:
            executor_node_port = {}
        self.executor_node_port = executor_node_port
        if executor_start_time_secs is self.thrift_spec[4][4]:
            executor_start_time_secs = {}
        self.executor_start_time_secs = executor_start_time_secs
        if worker_resources is self.thrift_spec[5][4]:
            worker_resources = {}
        self.worker_resources = worker_resources
        if total_shared_off_heap is self.thrift_spec[6][4]:
            total_shared_off_heap = {}
        self.total_shared_off_heap = total_shared_off_heap
        self.owner = owner

    def read(self, iprot):
        """Deserialize this struct from iprot (C-accelerated path when available)."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.master_code_dir = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.MAP:
                    self.node_host = {}
                    (_ktype614, _vtype615, _size613) = iprot.readMapBegin()
                    for _i617 in range(_size613):
                        _key618 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val619 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.node_host[_key618] = _val619
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.MAP:
                    self.executor_node_port = {}
                    (_ktype621, _vtype622, _size620) = iprot.readMapBegin()
                    for _i624 in range(_size620):
                        _key625 = []
                        (_etype630, _size627) = iprot.readListBegin()
                        for _i631 in range(_size627):
                            _elem632 = iprot.readI64()
                            _key625.append(_elem632)
                        iprot.readListEnd()
                        _val626 = NodeInfo()
                        _val626.read(iprot)
                        # BUGFIX: a list is unhashable and cannot be a dict
                        # key; freeze the wire-level list<i64> key to a tuple.
                        self.executor_node_port[tuple(_key625)] = _val626
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.MAP:
                    self.executor_start_time_secs = {}
                    (_ktype634, _vtype635, _size633) = iprot.readMapBegin()
                    for _i637 in range(_size633):
                        _key638 = []
                        (_etype643, _size640) = iprot.readListBegin()
                        for _i644 in range(_size640):
                            _elem645 = iprot.readI64()
                            _key638.append(_elem645)
                        iprot.readListEnd()
                        _val639 = iprot.readI64()
                        # BUGFIX: same as field 3 — tuple key, not list key.
                        self.executor_start_time_secs[tuple(_key638)] = _val639
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.MAP:
                    self.worker_resources = {}
                    (_ktype647, _vtype648, _size646) = iprot.readMapBegin()
                    for _i650 in range(_size646):
                        # NOTE(review): NodeInfo instances are used as dict
                        # keys here, so NodeInfo must be hashable — confirm
                        # its class definition provides __hash__.
                        _key651 = NodeInfo()
                        _key651.read(iprot)
                        _val652 = WorkerResources()
                        _val652.read(iprot)
                        self.worker_resources[_key651] = _val652
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.MAP:
                    self.total_shared_off_heap = {}
                    (_ktype654, _vtype655, _size653) = iprot.readMapBegin()
                    for _i657 in range(_size653):
                        _key658 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val659 = iprot.readDouble()
                        self.total_shared_off_heap[_key658] = _val659
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.STRING:
                    self.owner = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot; None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('Assignment')
        if self.master_code_dir is not None:
            oprot.writeFieldBegin('master_code_dir', TType.STRING, 1)
            oprot.writeString(self.master_code_dir.encode('utf-8') if sys.version_info[0] == 2 else self.master_code_dir)
            oprot.writeFieldEnd()
        if self.node_host is not None:
            oprot.writeFieldBegin('node_host', TType.MAP, 2)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.node_host))
            for kiter660, viter661 in self.node_host.items():
                oprot.writeString(kiter660.encode('utf-8') if sys.version_info[0] == 2 else kiter660)
                oprot.writeString(viter661.encode('utf-8') if sys.version_info[0] == 2 else viter661)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.executor_node_port is not None:
            oprot.writeFieldBegin('executor_node_port', TType.MAP, 3)
            oprot.writeMapBegin(TType.LIST, TType.STRUCT, len(self.executor_node_port))
            # Keys may be tuples (from read()) or lists; both iterate and len().
            for kiter662, viter663 in self.executor_node_port.items():
                oprot.writeListBegin(TType.I64, len(kiter662))
                for iter664 in kiter662:
                    oprot.writeI64(iter664)
                oprot.writeListEnd()
                viter663.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.executor_start_time_secs is not None:
            oprot.writeFieldBegin('executor_start_time_secs', TType.MAP, 4)
            oprot.writeMapBegin(TType.LIST, TType.I64, len(self.executor_start_time_secs))
            for kiter665, viter666 in self.executor_start_time_secs.items():
                oprot.writeListBegin(TType.I64, len(kiter665))
                for iter667 in kiter665:
                    oprot.writeI64(iter667)
                oprot.writeListEnd()
                oprot.writeI64(viter666)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.worker_resources is not None:
            oprot.writeFieldBegin('worker_resources', TType.MAP, 5)
            oprot.writeMapBegin(TType.STRUCT, TType.STRUCT, len(self.worker_resources))
            for kiter668, viter669 in self.worker_resources.items():
                kiter668.write(oprot)
                viter669.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.total_shared_off_heap is not None:
            oprot.writeFieldBegin('total_shared_off_heap', TType.MAP, 6)
            oprot.writeMapBegin(TType.STRING, TType.DOUBLE, len(self.total_shared_off_heap))
            for kiter670, viter671 in self.total_shared_off_heap.items():
                oprot.writeString(kiter670.encode('utf-8') if sys.version_info[0] == 2 else kiter670)
                oprot.writeDouble(viter671)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.owner is not None:
            oprot.writeFieldBegin('owner', TType.STRING, 7)
            oprot.writeString(self.owner.encode('utf-8') if sys.version_info[0] == 2 else self.owner)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """master_code_dir is the only required field."""
        if self.master_code_dir is None:
            raise TProtocolException(message='Required field master_code_dir is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TopologyActionOptions(object):
    """
    Union-like holder for the options of a topology action.

    Attributes:
     - kill_options: KillOptions struct
     - rebalance_options: RebalanceOptions struct
    """

    def __init__(self, kill_options=None, rebalance_options=None,):
        self.kill_options = kill_options
        self.rebalance_options = rebalance_options

    def read(self, iprot):
        """Deserialize this struct from iprot (C-accelerated path when available)."""
        fast = (iprot._fast_decode is not None
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None)
        if fast:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.kill_options = KillOptions()
                self.kill_options.read(iprot)
            elif fid == 2 and ftype == TType.STRUCT:
                self.rebalance_options = RebalanceOptions()
                self.rebalance_options.read(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to oprot (C-accelerated path when available)."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('TopologyActionOptions')
        if self.kill_options is not None:
            oprot.writeFieldBegin('kill_options', TType.STRUCT, 1)
            self.kill_options.write(oprot)
            oprot.writeFieldEnd()
        if self.rebalance_options is not None:
            oprot.writeFieldBegin('rebalance_options', TType.STRUCT, 2)
            self.rebalance_options.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields."""
        return

    def __repr__(self):
        pairs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, pairs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class StormBase(object):
"""
Attributes:
- name
- status
- num_workers
- component_executors
- launch_time_secs
- owner
- topology_action_options
- prev_status
- component_debug
- principal
- topology_version
"""
def __init__(self, name=None, status=None, num_workers=None, component_executors=None, launch_time_secs=None, owner=None, topology_action_options=None, prev_status=None, component_debug=None, principal=None, topology_version=None,):
self.name = name
self.status = status
self.num_workers = num_workers
self.component_executors = component_executors
self.launch_time_secs = launch_time_secs
self.owner = owner
self.topology_action_options = topology_action_options
self.prev_status = prev_status
self.component_debug = component_debug
self.principal = principal
self.topology_version = topology_version
    def read(self, iprot):
        """Deserialize this StormBase struct from iprot, field by field.

        Field types are taken from the protocol calls below:
        1=name(str), 2=status(i32), 3=num_workers(i32),
        4=component_executors(map<str,i32>), 5=launch_time_secs(i32),
        6=owner(str), 7=topology_action_options(struct), 8=prev_status(i32),
        9=component_debug(map<str,DebugOptions>), 10=principal(str),
        11=topology_version(str).
        """
        # Fast path: C-accelerated decode when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # Dispatch on field id; a type mismatch falls through to skip().
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 strings arrive as bytes and are decoded to unicode.
                    self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.status = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.num_workers = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.MAP:
                    # map<string, i32>: component name -> executor count.
                    self.component_executors = {}
                    (_ktype673, _vtype674, _size672) = iprot.readMapBegin()
                    for _i676 in range(_size672):
                        _key677 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val678 = iprot.readI32()
                        self.component_executors[_key677] = _val678
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.I32:
                    self.launch_time_secs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.STRING:
                    self.owner = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.STRUCT:
                    self.topology_action_options = TopologyActionOptions()
                    self.topology_action_options.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                if ftype == TType.I32:
                    self.prev_status = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 9:
                if ftype == TType.MAP:
                    # map<string, DebugOptions>.
                    self.component_debug = {}
                    (_ktype680, _vtype681, _size679) = iprot.readMapBegin()
                    for _i683 in range(_size679):
                        _key684 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val685 = DebugOptions()
                        _val685.read(iprot)
                        self.component_debug[_key684] = _val685
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 10:
                if ftype == TType.STRING:
                    self.principal = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 11:
                if ftype == TType.STRING:
                    self.topology_version = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this StormBase struct to *oprot*; fields left as None are omitted."""
        # Fast path: C-accelerated encoder when the protocol provides one.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('StormBase')
        # Field 1 (STRING): name
        if self.name is not None:
            oprot.writeFieldBegin('name', TType.STRING, 1)
            oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
            oprot.writeFieldEnd()
        # Field 2 (I32): status
        if self.status is not None:
            oprot.writeFieldBegin('status', TType.I32, 2)
            oprot.writeI32(self.status)
            oprot.writeFieldEnd()
        # Field 3 (I32): num_workers
        if self.num_workers is not None:
            oprot.writeFieldBegin('num_workers', TType.I32, 3)
            oprot.writeI32(self.num_workers)
            oprot.writeFieldEnd()
        # Field 4 (MAP string -> i32): component_executors
        if self.component_executors is not None:
            oprot.writeFieldBegin('component_executors', TType.MAP, 4)
            oprot.writeMapBegin(TType.STRING, TType.I32, len(self.component_executors))
            for kiter686, viter687 in self.component_executors.items():
                oprot.writeString(kiter686.encode('utf-8') if sys.version_info[0] == 2 else kiter686)
                oprot.writeI32(viter687)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        # Field 5 (I32): launch_time_secs
        if self.launch_time_secs is not None:
            oprot.writeFieldBegin('launch_time_secs', TType.I32, 5)
            oprot.writeI32(self.launch_time_secs)
            oprot.writeFieldEnd()
        # Field 6 (STRING): owner
        if self.owner is not None:
            oprot.writeFieldBegin('owner', TType.STRING, 6)
            oprot.writeString(self.owner.encode('utf-8') if sys.version_info[0] == 2 else self.owner)
            oprot.writeFieldEnd()
        # Field 7 (STRUCT): topology_action_options
        if self.topology_action_options is not None:
            oprot.writeFieldBegin('topology_action_options', TType.STRUCT, 7)
            self.topology_action_options.write(oprot)
            oprot.writeFieldEnd()
        # Field 8 (I32): prev_status
        if self.prev_status is not None:
            oprot.writeFieldBegin('prev_status', TType.I32, 8)
            oprot.writeI32(self.prev_status)
            oprot.writeFieldEnd()
        # Field 9 (MAP string -> DebugOptions struct): component_debug
        if self.component_debug is not None:
            oprot.writeFieldBegin('component_debug', TType.MAP, 9)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.component_debug))
            for kiter688, viter689 in self.component_debug.items():
                oprot.writeString(kiter688.encode('utf-8') if sys.version_info[0] == 2 else kiter688)
                viter689.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        # Field 10 (STRING): principal
        if self.principal is not None:
            oprot.writeFieldBegin('principal', TType.STRING, 10)
            oprot.writeString(self.principal.encode('utf-8') if sys.version_info[0] == 2 else self.principal)
            oprot.writeFieldEnd()
        # Field 11 (STRING): topology_version
        if self.topology_version is not None:
            oprot.writeFieldBegin('topology_version', TType.STRING, 11)
            oprot.writeString(self.topology_version.encode('utf-8') if sys.version_info[0] == 2 else self.topology_version)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
def validate(self):
if self.name is None:
raise TProtocolException(message='Required field name is unset!')
if self.status is None:
raise TProtocolException(message='Required field status is unset!')
if self.num_workers is None:
raise TProtocolException(message='Required field num_workers is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ClusterWorkerHeartbeat(object):
    """
    Attributes:
     - storm_id
     - executor_stats
     - time_secs
     - uptime_secs
    """

    def __init__(self, storm_id=None, executor_stats=None, time_secs=None, uptime_secs=None,):
        self.storm_id = storm_id
        self.executor_stats = executor_stats
        self.time_secs = time_secs
        self.uptime_secs = uptime_secs

    def read(self, iprot):
        """Deserialize this struct from the Thrift protocol *iprot*."""
        # Fast path: C-accelerated decode when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                # Field 1 (STRING): storm_id
                if ftype == TType.STRING:
                    self.storm_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                # Field 2 (MAP): executor_stats, ExecutorInfo -> ExecutorStats
                if ftype == TType.MAP:
                    self.executor_stats = {}
                    (_ktype691, _vtype692, _size690) = iprot.readMapBegin()
                    for _i694 in range(_size690):
                        _key695 = ExecutorInfo()
                        _key695.read(iprot)
                        _val696 = ExecutorStats()
                        _val696.read(iprot)
                        self.executor_stats[_key695] = _val696
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                # Field 3 (I32): time_secs
                if ftype == TType.I32:
                    self.time_secs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                # Field 4 (I32): uptime_secs
                if ftype == TType.I32:
                    self.uptime_secs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer writers stay readable.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; fields left as None are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ClusterWorkerHeartbeat')
        if self.storm_id is not None:
            oprot.writeFieldBegin('storm_id', TType.STRING, 1)
            oprot.writeString(self.storm_id.encode('utf-8') if sys.version_info[0] == 2 else self.storm_id)
            oprot.writeFieldEnd()
        if self.executor_stats is not None:
            oprot.writeFieldBegin('executor_stats', TType.MAP, 2)
            oprot.writeMapBegin(TType.STRUCT, TType.STRUCT, len(self.executor_stats))
            for kiter697, viter698 in self.executor_stats.items():
                kiter697.write(oprot)
                viter698.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.time_secs is not None:
            oprot.writeFieldBegin('time_secs', TType.I32, 3)
            oprot.writeI32(self.time_secs)
            oprot.writeFieldEnd()
        if self.uptime_secs is not None:
            oprot.writeFieldBegin('uptime_secs', TType.I32, 4)
            oprot.writeI32(self.uptime_secs)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset."""
        if self.storm_id is None:
            raise TProtocolException(message='Required field storm_id is unset!')
        if self.executor_stats is None:
            raise TProtocolException(message='Required field executor_stats is unset!')
        if self.time_secs is None:
            raise TProtocolException(message='Required field time_secs is unset!')
        if self.uptime_secs is None:
            raise TProtocolException(message='Required field uptime_secs is unset!')
        return

    def __repr__(self):
        """Constructor-style repr listing every attribute."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ThriftSerializedObject(object):
    """A named, opaque blob of Thrift-serialized bytes.

    Attributes:
     - name
     - bits
    """

    def __init__(self, name=None, bits=None,):
        self.name = name
        self.bits = bits

    def read(self, iprot):
        """Fill this struct by decoding fields from *iprot*."""
        fast = (iprot._fast_decode is not None
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None)
        if fast:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                raw = iprot.readString()
                self.name = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            elif fid == 2 and ftype == TType.STRING:
                self.bits = iprot.readBinary()
            else:
                # Unexpected field id or wire type: skip it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Encode this struct onto *oprot*, omitting unset fields."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ThriftSerializedObject')
        if self.name is not None:
            oprot.writeFieldBegin('name', TType.STRING, 1)
            payload = self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name
            oprot.writeString(payload)
            oprot.writeFieldEnd()
        if self.bits is not None:
            oprot.writeFieldBegin('bits', TType.STRING, 2)
            oprot.writeBinary(self.bits)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException when a required field is missing."""
        for attr in ('name', 'bits'):
            if getattr(self, attr) is None:
                raise TProtocolException(message='Required field %s is unset!' % attr)
        return

    def __repr__(self):
        parts = ('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class LocalStateData(object):
    """
    Attributes:
     - serialized_parts
    """

    def __init__(self, serialized_parts=None,):
        self.serialized_parts = serialized_parts

    def read(self, iprot):
        """Deserialize this struct from the Thrift protocol *iprot*."""
        # Fast path: C-accelerated decode when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                # Field 1 (MAP): serialized_parts, string -> ThriftSerializedObject
                if ftype == TType.MAP:
                    self.serialized_parts = {}
                    (_ktype700, _vtype701, _size699) = iprot.readMapBegin()
                    for _i703 in range(_size699):
                        _key704 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val705 = ThriftSerializedObject()
                        _val705.read(iprot)
                        self.serialized_parts[_key704] = _val705
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; fields left as None are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('LocalStateData')
        if self.serialized_parts is not None:
            oprot.writeFieldBegin('serialized_parts', TType.MAP, 1)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.serialized_parts))
            for kiter706, viter707 in self.serialized_parts.items():
                oprot.writeString(kiter706.encode('utf-8') if sys.version_info[0] == 2 else kiter706)
                viter707.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required map is unset."""
        if self.serialized_parts is None:
            raise TProtocolException(message='Required field serialized_parts is unset!')
        return

    def __repr__(self):
        """Constructor-style repr listing every attribute."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class LocalAssignment(object):
    """
    Attributes:
     - topology_id
     - executors
     - resources
     - total_node_shared
     - owner
    """

    def __init__(self, topology_id=None, executors=None, resources=None, total_node_shared=None, owner=None,):
        self.topology_id = topology_id
        self.executors = executors
        self.resources = resources
        self.total_node_shared = total_node_shared
        self.owner = owner

    def read(self, iprot):
        """Deserialize this struct from the Thrift protocol *iprot*."""
        # Fast path: C-accelerated decode when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                # Field 1 (STRING): topology_id
                if ftype == TType.STRING:
                    self.topology_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                # Field 2 (LIST of ExecutorInfo structs): executors
                if ftype == TType.LIST:
                    self.executors = []
                    (_etype711, _size708) = iprot.readListBegin()
                    for _i712 in range(_size708):
                        _elem713 = ExecutorInfo()
                        _elem713.read(iprot)
                        self.executors.append(_elem713)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                # Field 3 (STRUCT): resources (WorkerResources)
                if ftype == TType.STRUCT:
                    self.resources = WorkerResources()
                    self.resources.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                # Field 4 (DOUBLE): total_node_shared
                if ftype == TType.DOUBLE:
                    self.total_node_shared = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                # Field 5 (STRING): owner
                if ftype == TType.STRING:
                    self.owner = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; fields left as None are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('LocalAssignment')
        if self.topology_id is not None:
            oprot.writeFieldBegin('topology_id', TType.STRING, 1)
            oprot.writeString(self.topology_id.encode('utf-8') if sys.version_info[0] == 2 else self.topology_id)
            oprot.writeFieldEnd()
        if self.executors is not None:
            oprot.writeFieldBegin('executors', TType.LIST, 2)
            oprot.writeListBegin(TType.STRUCT, len(self.executors))
            for iter714 in self.executors:
                iter714.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.resources is not None:
            oprot.writeFieldBegin('resources', TType.STRUCT, 3)
            self.resources.write(oprot)
            oprot.writeFieldEnd()
        if self.total_node_shared is not None:
            oprot.writeFieldBegin('total_node_shared', TType.DOUBLE, 4)
            oprot.writeDouble(self.total_node_shared)
            oprot.writeFieldEnd()
        if self.owner is not None:
            oprot.writeFieldBegin('owner', TType.STRING, 5)
            oprot.writeString(self.owner.encode('utf-8') if sys.version_info[0] == 2 else self.owner)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset.

        Only topology_id and executors are required; the rest are optional.
        """
        if self.topology_id is None:
            raise TProtocolException(message='Required field topology_id is unset!')
        if self.executors is None:
            raise TProtocolException(message='Required field executors is unset!')
        return

    def __repr__(self):
        """Constructor-style repr listing every attribute."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class LSSupervisorId(object):
    """Wrapper struct carrying a single required supervisor id string.

    Attributes:
     - supervisor_id
    """

    def __init__(self, supervisor_id=None,):
        self.supervisor_id = supervisor_id

    def read(self, iprot):
        """Fill this struct by decoding fields from *iprot*."""
        fast = (iprot._fast_decode is not None
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None)
        if fast:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                raw = iprot.readString()
                self.supervisor_id = raw.decode('utf-8') if sys.version_info[0] == 2 else raw
            else:
                # Unexpected field id or wire type: skip it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Encode this struct onto *oprot*, omitting unset fields."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('LSSupervisorId')
        if self.supervisor_id is not None:
            oprot.writeFieldBegin('supervisor_id', TType.STRING, 1)
            payload = self.supervisor_id.encode('utf-8') if sys.version_info[0] == 2 else self.supervisor_id
            oprot.writeString(payload)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException when the required id is missing."""
        if self.supervisor_id is None:
            raise TProtocolException(message='Required field supervisor_id is unset!')
        return

    def __repr__(self):
        parts = ('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class LSApprovedWorkers(object):
    """
    Attributes:
     - approved_workers
    """

    def __init__(self, approved_workers=None,):
        self.approved_workers = approved_workers

    def read(self, iprot):
        """Deserialize this struct from the Thrift protocol *iprot*."""
        # Fast path: C-accelerated decode when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                # Field 1 (MAP): approved_workers, string -> i32
                if ftype == TType.MAP:
                    self.approved_workers = {}
                    (_ktype716, _vtype717, _size715) = iprot.readMapBegin()
                    for _i719 in range(_size715):
                        _key720 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val721 = iprot.readI32()
                        self.approved_workers[_key720] = _val721
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; fields left as None are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('LSApprovedWorkers')
        if self.approved_workers is not None:
            oprot.writeFieldBegin('approved_workers', TType.MAP, 1)
            oprot.writeMapBegin(TType.STRING, TType.I32, len(self.approved_workers))
            for kiter722, viter723 in self.approved_workers.items():
                oprot.writeString(kiter722.encode('utf-8') if sys.version_info[0] == 2 else kiter722)
                oprot.writeI32(viter723)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required map is unset."""
        if self.approved_workers is None:
            raise TProtocolException(message='Required field approved_workers is unset!')
        return

    def __repr__(self):
        """Constructor-style repr listing every attribute."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class LSSupervisorAssignments(object):
    """
    Attributes:
     - assignments
    """

    def __init__(self, assignments=None,):
        self.assignments = assignments

    def read(self, iprot):
        """Deserialize this struct from the Thrift protocol *iprot*."""
        # Fast path: C-accelerated decode when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                # Field 1 (MAP): assignments, i32 -> LocalAssignment
                if ftype == TType.MAP:
                    self.assignments = {}
                    (_ktype725, _vtype726, _size724) = iprot.readMapBegin()
                    for _i728 in range(_size724):
                        _key729 = iprot.readI32()
                        _val730 = LocalAssignment()
                        _val730.read(iprot)
                        self.assignments[_key729] = _val730
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; fields left as None are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('LSSupervisorAssignments')
        if self.assignments is not None:
            oprot.writeFieldBegin('assignments', TType.MAP, 1)
            oprot.writeMapBegin(TType.I32, TType.STRUCT, len(self.assignments))
            for kiter731, viter732 in self.assignments.items():
                oprot.writeI32(kiter731)
                viter732.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required map is unset."""
        if self.assignments is None:
            raise TProtocolException(message='Required field assignments is unset!')
        return

    def __repr__(self):
        """Constructor-style repr listing every attribute."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class LSWorkerHeartbeat(object):
    """
    Attributes:
     - time_secs
     - topology_id
     - executors
     - port
    """

    def __init__(self, time_secs=None, topology_id=None, executors=None, port=None,):
        self.time_secs = time_secs
        self.topology_id = topology_id
        self.executors = executors
        self.port = port

    def read(self, iprot):
        """Deserialize this struct from the Thrift protocol *iprot*."""
        # Fast path: C-accelerated decode when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                # Field 1 (I32): time_secs
                if ftype == TType.I32:
                    self.time_secs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                # Field 2 (STRING): topology_id
                if ftype == TType.STRING:
                    self.topology_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                # Field 3 (LIST of ExecutorInfo structs): executors
                if ftype == TType.LIST:
                    self.executors = []
                    (_etype736, _size733) = iprot.readListBegin()
                    for _i737 in range(_size733):
                        _elem738 = ExecutorInfo()
                        _elem738.read(iprot)
                        self.executors.append(_elem738)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                # Field 4 (I32): port
                if ftype == TType.I32:
                    self.port = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; fields left as None are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('LSWorkerHeartbeat')
        if self.time_secs is not None:
            oprot.writeFieldBegin('time_secs', TType.I32, 1)
            oprot.writeI32(self.time_secs)
            oprot.writeFieldEnd()
        if self.topology_id is not None:
            oprot.writeFieldBegin('topology_id', TType.STRING, 2)
            oprot.writeString(self.topology_id.encode('utf-8') if sys.version_info[0] == 2 else self.topology_id)
            oprot.writeFieldEnd()
        if self.executors is not None:
            oprot.writeFieldBegin('executors', TType.LIST, 3)
            oprot.writeListBegin(TType.STRUCT, len(self.executors))
            for iter739 in self.executors:
                iter739.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.port is not None:
            oprot.writeFieldBegin('port', TType.I32, 4)
            oprot.writeI32(self.port)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset (all four are required)."""
        if self.time_secs is None:
            raise TProtocolException(message='Required field time_secs is unset!')
        if self.topology_id is None:
            raise TProtocolException(message='Required field topology_id is unset!')
        if self.executors is None:
            raise TProtocolException(message='Required field executors is unset!')
        if self.port is None:
            raise TProtocolException(message='Required field port is unset!')
        return

    def __repr__(self):
        """Constructor-style repr listing every attribute."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class LSTopoHistory(object):
    """
    Attributes:
     - topology_id
     - time_stamp
     - users
     - groups
    """

    def __init__(self, topology_id=None, time_stamp=None, users=None, groups=None,):
        self.topology_id = topology_id
        self.time_stamp = time_stamp
        self.users = users
        self.groups = groups

    def read(self, iprot):
        """Deserialize this struct from the Thrift protocol *iprot*."""
        # Fast path: C-accelerated decode when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                # Field 1 (STRING): topology_id
                if ftype == TType.STRING:
                    self.topology_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                # Field 2 (I64): time_stamp
                if ftype == TType.I64:
                    self.time_stamp = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                # Field 3 (LIST of strings): users
                if ftype == TType.LIST:
                    self.users = []
                    (_etype743, _size740) = iprot.readListBegin()
                    for _i744 in range(_size740):
                        _elem745 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.users.append(_elem745)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                # Field 4 (LIST of strings): groups
                if ftype == TType.LIST:
                    self.groups = []
                    (_etype749, _size746) = iprot.readListBegin()
                    for _i750 in range(_size746):
                        _elem751 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.groups.append(_elem751)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; fields left as None are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('LSTopoHistory')
        if self.topology_id is not None:
            oprot.writeFieldBegin('topology_id', TType.STRING, 1)
            oprot.writeString(self.topology_id.encode('utf-8') if sys.version_info[0] == 2 else self.topology_id)
            oprot.writeFieldEnd()
        if self.time_stamp is not None:
            oprot.writeFieldBegin('time_stamp', TType.I64, 2)
            oprot.writeI64(self.time_stamp)
            oprot.writeFieldEnd()
        if self.users is not None:
            oprot.writeFieldBegin('users', TType.LIST, 3)
            oprot.writeListBegin(TType.STRING, len(self.users))
            for iter752 in self.users:
                oprot.writeString(iter752.encode('utf-8') if sys.version_info[0] == 2 else iter752)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.groups is not None:
            oprot.writeFieldBegin('groups', TType.LIST, 4)
            oprot.writeListBegin(TType.STRING, len(self.groups))
            for iter753 in self.groups:
                oprot.writeString(iter753.encode('utf-8') if sys.version_info[0] == 2 else iter753)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if any required field is unset (all four are required)."""
        if self.topology_id is None:
            raise TProtocolException(message='Required field topology_id is unset!')
        if self.time_stamp is None:
            raise TProtocolException(message='Required field time_stamp is unset!')
        if self.users is None:
            raise TProtocolException(message='Required field users is unset!')
        if self.groups is None:
            raise TProtocolException(message='Required field groups is unset!')
        return

    def __repr__(self):
        """Constructor-style repr listing every attribute."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class LSTopoHistoryList(object):
    """
    Attributes:
     - topo_history
    """

    def __init__(self, topo_history=None,):
        self.topo_history = topo_history

    def read(self, iprot):
        """Deserialize this struct from the Thrift protocol *iprot*."""
        # Fast path: C-accelerated decode when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                # Field 1 (LIST of LSTopoHistory structs): topo_history
                if ftype == TType.LIST:
                    self.topo_history = []
                    (_etype757, _size754) = iprot.readListBegin()
                    for _i758 in range(_size754):
                        _elem759 = LSTopoHistory()
                        _elem759.read(iprot)
                        self.topo_history.append(_elem759)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; fields left as None are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('LSTopoHistoryList')
        if self.topo_history is not None:
            oprot.writeFieldBegin('topo_history', TType.LIST, 1)
            oprot.writeListBegin(TType.STRUCT, len(self.topo_history))
            for iter760 in self.topo_history:
                iter760.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if the required list is unset."""
        if self.topo_history is None:
            raise TProtocolException(message='Required field topo_history is unset!')
        return

    def __repr__(self):
        """Constructor-style repr listing every attribute."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class ProfileRequest(object):
    """
    Attributes:
     - nodeInfo
     - action
     - time_stamp
    """

    def __init__(self, nodeInfo=None, action=None, time_stamp=None,):
        self.nodeInfo = nodeInfo
        self.action = action
        self.time_stamp = time_stamp

    def read(self, iprot):
        """Deserialize this struct from the Thrift protocol *iprot*."""
        # Fast path: C-accelerated decode when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                # Field 1 (STRUCT): nodeInfo (NodeInfo)
                if ftype == TType.STRUCT:
                    self.nodeInfo = NodeInfo()
                    self.nodeInfo.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                # Field 2 (I32): action
                if ftype == TType.I32:
                    self.action = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                # Field 3 (I64): time_stamp
                if ftype == TType.I64:
                    self.time_stamp = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; fields left as None are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('ProfileRequest')
        if self.nodeInfo is not None:
            oprot.writeFieldBegin('nodeInfo', TType.STRUCT, 1)
            self.nodeInfo.write(oprot)
            oprot.writeFieldEnd()
        if self.action is not None:
            oprot.writeFieldBegin('action', TType.I32, 2)
            oprot.writeI32(self.action)
            oprot.writeFieldEnd()
        if self.time_stamp is not None:
            oprot.writeFieldBegin('time_stamp', TType.I64, 3)
            oprot.writeI64(self.time_stamp)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset.

        nodeInfo and action are required; time_stamp is optional.
        """
        if self.nodeInfo is None:
            raise TProtocolException(message='Required field nodeInfo is unset!')
        if self.action is None:
            raise TProtocolException(message='Required field action is unset!')
        return

    def __repr__(self):
        """Constructor-style repr listing every attribute."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class GetInfoOptions(object):
    """Options struct with a single optional field.

    Attributes:
     - num_err_choice
    """

    def __init__(self, num_err_choice=None,):
        self.num_err_choice = num_err_choice

    def read(self, iprot):
        """Fill this struct by decoding fields from *iprot*."""
        fast = (iprot._fast_decode is not None
                and isinstance(iprot.trans, TTransport.CReadableTransport)
                and self.thrift_spec is not None)
        if fast:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.I32:
                self.num_err_choice = iprot.readI32()
            else:
                # Unexpected field id or wire type: skip it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Encode this struct onto *oprot*, omitting unset fields."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('GetInfoOptions')
        if self.num_err_choice is not None:
            oprot.writeFieldBegin('num_err_choice', TType.I32, 1)
            oprot.writeI32(self.num_err_choice)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields; always succeeds."""
        return

    def __repr__(self):
        parts = ('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class LogLevel(object):
    """
    Attributes:
     - action
     - target_log_level
     - reset_log_level_timeout_secs
     - reset_log_level_timeout_epoch
     - reset_log_level
    """

    def __init__(self, action=None, target_log_level=None, reset_log_level_timeout_secs=None, reset_log_level_timeout_epoch=None, reset_log_level=None,):
        self.action = action
        self.target_log_level = target_log_level
        self.reset_log_level_timeout_secs = reset_log_level_timeout_secs
        self.reset_log_level_timeout_epoch = reset_log_level_timeout_epoch
        self.reset_log_level = reset_log_level

    def read(self, iprot):
        """Deserialize this struct from the Thrift protocol *iprot*."""
        # Fast path: C-accelerated decode when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                # Field 1 (I32): action
                if ftype == TType.I32:
                    self.action = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                # Field 2 (STRING): target_log_level
                if ftype == TType.STRING:
                    self.target_log_level = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                # Field 3 (I32): reset_log_level_timeout_secs
                if ftype == TType.I32:
                    self.reset_log_level_timeout_secs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                # Field 4 (I64): reset_log_level_timeout_epoch
                if ftype == TType.I64:
                    self.reset_log_level_timeout_epoch = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                # Field 5 (STRING): reset_log_level
                if ftype == TType.STRING:
                    self.reset_log_level = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; fields left as None are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('LogLevel')
        if self.action is not None:
            oprot.writeFieldBegin('action', TType.I32, 1)
            oprot.writeI32(self.action)
            oprot.writeFieldEnd()
        if self.target_log_level is not None:
            oprot.writeFieldBegin('target_log_level', TType.STRING, 2)
            oprot.writeString(self.target_log_level.encode('utf-8') if sys.version_info[0] == 2 else self.target_log_level)
            oprot.writeFieldEnd()
        if self.reset_log_level_timeout_secs is not None:
            oprot.writeFieldBegin('reset_log_level_timeout_secs', TType.I32, 3)
            oprot.writeI32(self.reset_log_level_timeout_secs)
            oprot.writeFieldEnd()
        if self.reset_log_level_timeout_epoch is not None:
            oprot.writeFieldBegin('reset_log_level_timeout_epoch', TType.I64, 4)
            oprot.writeI64(self.reset_log_level_timeout_epoch)
            oprot.writeFieldEnd()
        if self.reset_log_level is not None:
            oprot.writeFieldBegin('reset_log_level', TType.STRING, 5)
            oprot.writeString(self.reset_log_level.encode('utf-8') if sys.version_info[0] == 2 else self.reset_log_level)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException if a required field is unset.

        Only action is required; the remaining fields are optional.
        """
        if self.action is None:
            raise TProtocolException(message='Required field action is unset!')
        return

    def __repr__(self):
        """Constructor-style repr listing every attribute."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class LogConfig(object):
    """
    Attributes:
     - named_logger_level: map from name (UTF-8 string key) to a LogLevel
       struct; None until populated.
    """

    def __init__(self, named_logger_level=None,):
        self.named_logger_level = named_logger_level

    def read(self, iprot):
        """Populate this struct from the Thrift input protocol *iprot*."""
        # Fast path: C-accelerated decoder when the transport and generated
        # spec allow it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 2:
                if ftype == TType.MAP:
                    self.named_logger_level = {}
                    (_ktype762, _vtype763, _size761) = iprot.readMapBegin()
                    for _i765 in range(_size761):
                        # Keys are UTF-8 strings; decode only on Python 2,
                        # where readString() returns bytes.
                        _key766 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val767 = LogLevel()
                        _val767.read(iprot)
                        self.named_logger_level[_key766] = _val767
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its value for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the Thrift output protocol *oprot*."""
        # Fast path: C-accelerated encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('LogConfig')
        if self.named_logger_level is not None:
            oprot.writeFieldBegin('named_logger_level', TType.MAP, 2)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.named_logger_level))
            for kiter768, viter769 in self.named_logger_level.items():
                oprot.writeString(kiter768.encode('utf-8') if sys.version_info[0] == 2 else kiter768)
                viter769.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TopologyHistoryInfo(object):
    """
    Attributes:
     - topo_ids
    """

    def __init__(self, topo_ids=None,):
        self.topo_ids = topo_ids

    def read(self, iprot):
        """Populate this struct from the Thrift input protocol *iprot*."""
        # Fast path: C-accelerated decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        py2 = sys.version_info[0] == 2
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.LIST:
                self.topo_ids = []
                (_etype, _count) = iprot.readListBegin()
                for _ in range(_count):
                    raw = iprot.readString()
                    self.topo_ids.append(raw.decode('utf-8') if py2 else raw)
                iprot.readListEnd()
            else:
                # Unknown field id or mismatched wire type: skip the value.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the Thrift output protocol *oprot*."""
        # Fast path: C-accelerated encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        py2 = sys.version_info[0] == 2
        oprot.writeStructBegin('TopologyHistoryInfo')
        if self.topo_ids is not None:
            oprot.writeFieldBegin('topo_ids', TType.LIST, 1)
            oprot.writeListBegin(TType.STRING, len(self.topo_ids))
            for topo_id in self.topo_ids:
                oprot.writeString(topo_id.encode('utf-8') if py2 else topo_id)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class OwnerResourceSummary(object):
    """
    Attributes:
     - owner
     - total_topologies
     - total_executors
     - total_workers
     - memory_usage
     - cpu_usage
     - memory_guarantee
     - cpu_guarantee
     - memory_guarantee_remaining
     - cpu_guarantee_remaining
     - isolated_node_guarantee
     - total_tasks
     - requested_on_heap_memory
     - requested_off_heap_memory
     - requested_total_memory
     - requested_cpu
     - assigned_on_heap_memory
     - assigned_off_heap_memory
    """

    def __init__(self, owner=None, total_topologies=None, total_executors=None, total_workers=None, memory_usage=None, cpu_usage=None, memory_guarantee=None, cpu_guarantee=None, memory_guarantee_remaining=None, cpu_guarantee_remaining=None, isolated_node_guarantee=None, total_tasks=None, requested_on_heap_memory=None, requested_off_heap_memory=None, requested_total_memory=None, requested_cpu=None, assigned_on_heap_memory=None, assigned_off_heap_memory=None,):
        self.owner = owner
        self.total_topologies = total_topologies
        self.total_executors = total_executors
        self.total_workers = total_workers
        self.memory_usage = memory_usage
        self.cpu_usage = cpu_usage
        self.memory_guarantee = memory_guarantee
        self.cpu_guarantee = cpu_guarantee
        self.memory_guarantee_remaining = memory_guarantee_remaining
        self.cpu_guarantee_remaining = cpu_guarantee_remaining
        self.isolated_node_guarantee = isolated_node_guarantee
        self.total_tasks = total_tasks
        self.requested_on_heap_memory = requested_on_heap_memory
        self.requested_off_heap_memory = requested_off_heap_memory
        self.requested_total_memory = requested_total_memory
        self.requested_cpu = requested_cpu
        self.assigned_on_heap_memory = assigned_on_heap_memory
        self.assigned_off_heap_memory = assigned_off_heap_memory

    def _field_layout(self):
        """Return ((attr, field id, wire type), ...) in wire (field-id) order."""
        return (
            ('owner', 1, TType.STRING),
            ('total_topologies', 2, TType.I32),
            ('total_executors', 3, TType.I32),
            ('total_workers', 4, TType.I32),
            ('memory_usage', 5, TType.DOUBLE),
            ('cpu_usage', 6, TType.DOUBLE),
            ('memory_guarantee', 7, TType.DOUBLE),
            ('cpu_guarantee', 8, TType.DOUBLE),
            ('memory_guarantee_remaining', 9, TType.DOUBLE),
            ('cpu_guarantee_remaining', 10, TType.DOUBLE),
            ('isolated_node_guarantee', 11, TType.I32),
            ('total_tasks', 12, TType.I32),
            ('requested_on_heap_memory', 13, TType.DOUBLE),
            ('requested_off_heap_memory', 14, TType.DOUBLE),
            ('requested_total_memory', 15, TType.DOUBLE),
            ('requested_cpu', 16, TType.DOUBLE),
            ('assigned_on_heap_memory', 17, TType.DOUBLE),
            ('assigned_off_heap_memory', 18, TType.DOUBLE),
        )

    def read(self, iprot):
        """Populate this struct from the Thrift input protocol *iprot*."""
        # Fast path: C-accelerated decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        by_id = dict((fid, (attr, wire)) for attr, fid, wire in self._field_layout())
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            entry = by_id.get(fid)
            if entry is None or entry[1] != ftype:
                # Unknown field id or mismatched wire type: skip the value.
                iprot.skip(ftype)
            else:
                attr, wire = entry
                if wire == TType.I32:
                    setattr(self, attr, iprot.readI32())
                elif wire == TType.DOUBLE:
                    setattr(self, attr, iprot.readDouble())
                else:  # TType.STRING (only field 1)
                    raw = iprot.readString()
                    setattr(self, attr, raw.decode('utf-8') if sys.version_info[0] == 2 else raw)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the Thrift output protocol *oprot*."""
        # Fast path: C-accelerated encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('OwnerResourceSummary')
        # Emit in field-id order, omitting fields that are None.
        for attr, fid, wire in self._field_layout():
            value = getattr(self, attr)
            if value is None:
                continue
            oprot.writeFieldBegin(attr, wire, fid)
            if wire == TType.I32:
                oprot.writeI32(value)
            elif wire == TType.DOUBLE:
                oprot.writeDouble(value)
            else:  # TType.STRING (only field 1)
                oprot.writeString(value.encode('utf-8') if sys.version_info[0] == 2 else value)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException when the required field is unset."""
        if self.owner is None:
            raise TProtocolException(message='Required field owner is unset!')
        return

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class SupervisorWorkerHeartbeat(object):
    """
    Attributes:
     - storm_id: required string id.
     - executors: required list of ExecutorInfo structs.
     - time_secs: required i32 (presumably a timestamp in seconds — TODO
       confirm against callers).
    """

    def __init__(self, storm_id=None, executors=None, time_secs=None,):
        self.storm_id = storm_id
        self.executors = executors
        self.time_secs = time_secs

    def read(self, iprot):
        """Populate this struct from the Thrift input protocol *iprot*."""
        # Fast path: C-accelerated decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Decode only on Python 2, where readString() returns bytes.
                    self.storm_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.LIST:
                    self.executors = []
                    (_etype780, _size777) = iprot.readListBegin()
                    for _i781 in range(_size777):
                        _elem782 = ExecutorInfo()
                        _elem782.read(iprot)
                        self.executors.append(_elem782)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.time_secs = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its value for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the Thrift output protocol *oprot*."""
        # Fast path: C-accelerated encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SupervisorWorkerHeartbeat')
        if self.storm_id is not None:
            oprot.writeFieldBegin('storm_id', TType.STRING, 1)
            oprot.writeString(self.storm_id.encode('utf-8') if sys.version_info[0] == 2 else self.storm_id)
            oprot.writeFieldEnd()
        if self.executors is not None:
            oprot.writeFieldBegin('executors', TType.LIST, 2)
            oprot.writeListBegin(TType.STRUCT, len(self.executors))
            for iter783 in self.executors:
                iter783.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.time_secs is not None:
            oprot.writeFieldBegin('time_secs', TType.I32, 3)
            oprot.writeI32(self.time_secs)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException when any required field is unset."""
        if self.storm_id is None:
            raise TProtocolException(message='Required field storm_id is unset!')
        if self.executors is None:
            raise TProtocolException(message='Required field executors is unset!')
        if self.time_secs is None:
            raise TProtocolException(message='Required field time_secs is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class SupervisorWorkerHeartbeats(object):
    """
    Attributes:
     - supervisor_id: required string id.
     - worker_heartbeats: required list of SupervisorWorkerHeartbeat structs.
    """

    def __init__(self, supervisor_id=None, worker_heartbeats=None,):
        self.supervisor_id = supervisor_id
        self.worker_heartbeats = worker_heartbeats

    def read(self, iprot):
        """Populate this struct from the Thrift input protocol *iprot*."""
        # Fast path: C-accelerated decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Decode only on Python 2, where readString() returns bytes.
                    self.supervisor_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.LIST:
                    self.worker_heartbeats = []
                    (_etype787, _size784) = iprot.readListBegin()
                    for _i788 in range(_size784):
                        _elem789 = SupervisorWorkerHeartbeat()
                        _elem789.read(iprot)
                        self.worker_heartbeats.append(_elem789)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its value for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the Thrift output protocol *oprot*."""
        # Fast path: C-accelerated encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SupervisorWorkerHeartbeats')
        if self.supervisor_id is not None:
            oprot.writeFieldBegin('supervisor_id', TType.STRING, 1)
            oprot.writeString(self.supervisor_id.encode('utf-8') if sys.version_info[0] == 2 else self.supervisor_id)
            oprot.writeFieldEnd()
        if self.worker_heartbeats is not None:
            oprot.writeFieldBegin('worker_heartbeats', TType.LIST, 2)
            oprot.writeListBegin(TType.STRUCT, len(self.worker_heartbeats))
            for iter790 in self.worker_heartbeats:
                iter790.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException when any required field is unset."""
        if self.supervisor_id is None:
            raise TProtocolException(message='Required field supervisor_id is unset!')
        if self.worker_heartbeats is None:
            raise TProtocolException(message='Required field worker_heartbeats is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class SupervisorAssignments(object):
    """
    Attributes:
     - storm_assignment: map from string key to an Assignment struct;
       defaults to an empty dict.
    """

    def __init__(self, storm_assignment={
    },):
        # NOTE(review): the `{}` default in the signature is created once at
        # definition time. The identity check below substitutes a fresh dict
        # only when the caller passed the thrift_spec default object; if the
        # signature default and thrift_spec[1][4] are distinct objects,
        # default-constructed instances would share one mutable dict. This is
        # the standard Thrift codegen pattern — confirm thrift_spec wiring
        # before changing.
        if storm_assignment is self.thrift_spec[1][4]:
            storm_assignment = {
            }
        self.storm_assignment = storm_assignment

    def read(self, iprot):
        """Populate this struct from the Thrift input protocol *iprot*."""
        # Fast path: C-accelerated decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.MAP:
                    self.storm_assignment = {}
                    (_ktype792, _vtype793, _size791) = iprot.readMapBegin()
                    for _i795 in range(_size791):
                        # Keys are UTF-8 strings; decode only on Python 2,
                        # where readString() returns bytes.
                        _key796 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val797 = Assignment()
                        _val797.read(iprot)
                        self.storm_assignment[_key796] = _val797
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its value for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the Thrift output protocol *oprot*."""
        # Fast path: C-accelerated encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('SupervisorAssignments')
        if self.storm_assignment is not None:
            oprot.writeFieldBegin('storm_assignment', TType.MAP, 1)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.storm_assignment))
            for kiter798, viter799 in self.storm_assignment.items():
                oprot.writeString(kiter798.encode('utf-8') if sys.version_info[0] == 2 else kiter798)
                viter799.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class WorkerMetricPoint(object):
    """
    A single metric sample. All six fields are required (see validate()).

    Attributes:
     - metricName: string name of the metric.
     - timestamp: i64 (presumably epoch-based — TODO confirm units).
     - metricValue: double sample value.
     - componentId: string id.
     - executorId: string id.
     - streamId: string id.
    """

    def __init__(self, metricName=None, timestamp=None, metricValue=None, componentId=None, executorId=None, streamId=None,):
        self.metricName = metricName
        self.timestamp = timestamp
        self.metricValue = metricValue
        self.componentId = componentId
        self.executorId = executorId
        self.streamId = streamId

    def read(self, iprot):
        """Populate this struct from the Thrift input protocol *iprot*."""
        # Fast path: C-accelerated decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # String fields decode UTF-8 only on Python 2, where readString()
            # returns bytes.
            if fid == 1:
                if ftype == TType.STRING:
                    self.metricName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I64:
                    self.timestamp = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.DOUBLE:
                    self.metricValue = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.componentId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.executorId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.STRING:
                    self.streamId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its value for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the Thrift output protocol *oprot*."""
        # Fast path: C-accelerated encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('WorkerMetricPoint')
        if self.metricName is not None:
            oprot.writeFieldBegin('metricName', TType.STRING, 1)
            oprot.writeString(self.metricName.encode('utf-8') if sys.version_info[0] == 2 else self.metricName)
            oprot.writeFieldEnd()
        if self.timestamp is not None:
            oprot.writeFieldBegin('timestamp', TType.I64, 2)
            oprot.writeI64(self.timestamp)
            oprot.writeFieldEnd()
        if self.metricValue is not None:
            oprot.writeFieldBegin('metricValue', TType.DOUBLE, 3)
            oprot.writeDouble(self.metricValue)
            oprot.writeFieldEnd()
        if self.componentId is not None:
            oprot.writeFieldBegin('componentId', TType.STRING, 4)
            oprot.writeString(self.componentId.encode('utf-8') if sys.version_info[0] == 2 else self.componentId)
            oprot.writeFieldEnd()
        if self.executorId is not None:
            oprot.writeFieldBegin('executorId', TType.STRING, 5)
            oprot.writeString(self.executorId.encode('utf-8') if sys.version_info[0] == 2 else self.executorId)
            oprot.writeFieldEnd()
        if self.streamId is not None:
            oprot.writeFieldBegin('streamId', TType.STRING, 6)
            oprot.writeString(self.streamId.encode('utf-8') if sys.version_info[0] == 2 else self.streamId)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException when any required field is unset."""
        if self.metricName is None:
            raise TProtocolException(message='Required field metricName is unset!')
        if self.timestamp is None:
            raise TProtocolException(message='Required field timestamp is unset!')
        if self.metricValue is None:
            raise TProtocolException(message='Required field metricValue is unset!')
        if self.componentId is None:
            raise TProtocolException(message='Required field componentId is unset!')
        if self.executorId is None:
            raise TProtocolException(message='Required field executorId is unset!')
        if self.streamId is None:
            raise TProtocolException(message='Required field streamId is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class WorkerMetricList(object):
    """
    Attributes:
     - metrics
    """

    def __init__(self, metrics=None,):
        self.metrics = metrics

    def read(self, iprot):
        """Populate this struct from the Thrift input protocol *iprot*."""
        # Fast path: C-accelerated decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.LIST:
                self.metrics = []
                (_etype, _count) = iprot.readListBegin()
                for _ in range(_count):
                    point = WorkerMetricPoint()
                    point.read(iprot)
                    self.metrics.append(point)
                iprot.readListEnd()
            else:
                # Unknown field id or mismatched wire type: skip the value.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the Thrift output protocol *oprot*."""
        # Fast path: C-accelerated encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('WorkerMetricList')
        if self.metrics is not None:
            oprot.writeFieldBegin('metrics', TType.LIST, 1)
            oprot.writeListBegin(TType.STRUCT, len(self.metrics))
            for point in self.metrics:
                point.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class WorkerMetrics(object):
    """
    Metrics for one worker. All four fields are required (see validate()).

    Attributes:
     - topologyId: string id.
     - port: i32 port number.
     - hostname: string host name.
     - metricList: WorkerMetricList struct.
    """

    def __init__(self, topologyId=None, port=None, hostname=None, metricList=None,):
        self.topologyId = topologyId
        self.port = port
        self.hostname = hostname
        self.metricList = metricList

    def read(self, iprot):
        """Populate this struct from the Thrift input protocol *iprot*."""
        # Fast path: C-accelerated decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Decode only on Python 2, where readString() returns bytes.
                    self.topologyId = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.port = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.hostname = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.metricList = WorkerMetricList()
                    self.metricList.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its value for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the Thrift output protocol *oprot*."""
        # Fast path: C-accelerated encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('WorkerMetrics')
        if self.topologyId is not None:
            oprot.writeFieldBegin('topologyId', TType.STRING, 1)
            oprot.writeString(self.topologyId.encode('utf-8') if sys.version_info[0] == 2 else self.topologyId)
            oprot.writeFieldEnd()
        if self.port is not None:
            oprot.writeFieldBegin('port', TType.I32, 2)
            oprot.writeI32(self.port)
            oprot.writeFieldEnd()
        if self.hostname is not None:
            oprot.writeFieldBegin('hostname', TType.STRING, 3)
            oprot.writeString(self.hostname.encode('utf-8') if sys.version_info[0] == 2 else self.hostname)
            oprot.writeFieldEnd()
        if self.metricList is not None:
            oprot.writeFieldBegin('metricList', TType.STRUCT, 4)
            self.metricList.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException when any required field is unset."""
        if self.topologyId is None:
            raise TProtocolException(message='Required field topologyId is unset!')
        if self.port is None:
            raise TProtocolException(message='Required field port is unset!')
        if self.hostname is None:
            raise TProtocolException(message='Required field hostname is unset!')
        if self.metricList is None:
            raise TProtocolException(message='Required field metricList is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class DRPCRequest(object):
    """
    Attributes:
     - func_args
     - request_id
    """

    def __init__(self, func_args=None, request_id=None,):
        self.func_args = func_args
        self.request_id = request_id

    def read(self, iprot):
        """Populate this struct from the Thrift input protocol *iprot*."""
        # Fast path: C-accelerated decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        py2 = sys.version_info[0] == 2
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if ftype == TType.STRING and fid in (1, 2):
                raw = iprot.readString()
                text = raw.decode('utf-8') if py2 else raw
                if fid == 1:
                    self.func_args = text
                else:
                    self.request_id = text
            else:
                # Unknown field id or mismatched wire type: skip the value.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the Thrift output protocol *oprot*."""
        # Fast path: C-accelerated encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        py2 = sys.version_info[0] == 2
        oprot.writeStructBegin('DRPCRequest')
        if self.func_args is not None:
            oprot.writeFieldBegin('func_args', TType.STRING, 1)
            oprot.writeString(self.func_args.encode('utf-8') if py2 else self.func_args)
            oprot.writeFieldEnd()
        if self.request_id is not None:
            oprot.writeFieldBegin('request_id', TType.STRING, 2)
            oprot.writeString(self.request_id.encode('utf-8') if py2 else self.request_id)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException when any required field is unset."""
        if self.func_args is None:
            raise TProtocolException(message='Required field func_args is unset!')
        if self.request_id is None:
            raise TProtocolException(message='Required field request_id is unset!')
        return

    def __repr__(self):
        pairs = ('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class DRPCExecutionException(TException):
    """
    Exception struct carried over the wire for failed DRPC executions.

    Attributes:
     - msg: required string message.
     - type: optional i32 code (not checked by validate()).
    """

    def __init__(self, msg=None, type=None,):
        self.msg = msg
        self.type = type

    def read(self, iprot):
        """Populate this struct from the Thrift input protocol *iprot*."""
        # Fast path: C-accelerated decoder.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Decode only on Python 2, where readString() returns bytes.
                    self.msg = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.type = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its value for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the Thrift output protocol *oprot*."""
        # Fast path: C-accelerated encoder.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('DRPCExecutionException')
        if self.msg is not None:
            oprot.writeFieldBegin('msg', TType.STRING, 1)
            oprot.writeString(self.msg.encode('utf-8') if sys.version_info[0] == 2 else self.msg)
            oprot.writeFieldEnd()
        if self.type is not None:
            oprot.writeFieldBegin('type', TType.I32, 2)
            oprot.writeI32(self.type)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Raise TProtocolException when the required field is unset."""
        if self.msg is None:
            raise TProtocolException(message='Required field msg is unset!')
        return

    def __str__(self):
        # Exceptions stringify as their repr for easier debugging.
        return repr(self)

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class HBPulse(object):
    """Thrift struct: a single heartbeat pulse.

    Attributes:
     - id: pulse identifier string (required; see validate())
     - details: opaque binary payload
    """

    def __init__(self, id=None, details=None,):
        self.id = id
        self.details = details

    def read(self, iprot):
        # Use the accelerated C decoder when the protocol provides one.
        fast = iprot._fast_decode
        if fast is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            fast(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                value = iprot.readString()
                if sys.version_info[0] == 2:
                    value = value.decode('utf-8')
                self.id = value
            elif field_id == 2 and field_type == TType.STRING:
                self.details = iprot.readBinary()
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Use the accelerated C encoder when the protocol provides one.
        fast = oprot._fast_encode
        if fast is not None and self.thrift_spec is not None:
            oprot.trans.write(fast(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('HBPulse')
        if self.id is not None:
            oprot.writeFieldBegin('id', TType.STRING, 1)
            out = self.id
            if sys.version_info[0] == 2:
                out = out.encode('utf-8')
            oprot.writeString(out)
            oprot.writeFieldEnd()
        if self.details is not None:
            oprot.writeFieldBegin('details', TType.STRING, 2)
            oprot.writeBinary(self.details)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.id is None:
            raise TProtocolException(message='Required field id is unset!')
        return

    def __repr__(self):
        pairs = ('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class HBRecords(object):
    """Thrift struct: a collection of heartbeat pulses.

    Attributes:
     - pulses: list of HBPulse structs (optional)
    """

    def __init__(self, pulses=None,):
        self.pulses = pulses

    def read(self, iprot):
        # Use the accelerated C decoder when the protocol provides one.
        fast = iprot._fast_decode
        if fast is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            fast(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.LIST:
                self.pulses = []
                _, list_size = iprot.readListBegin()
                for _ in range(list_size):
                    pulse = HBPulse()
                    pulse.read(iprot)
                    self.pulses.append(pulse)
                iprot.readListEnd()
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Use the accelerated C encoder when the protocol provides one.
        fast = oprot._fast_encode
        if fast is not None and self.thrift_spec is not None:
            oprot.trans.write(fast(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('HBRecords')
        if self.pulses is not None:
            oprot.writeFieldBegin('pulses', TType.LIST, 1)
            oprot.writeListBegin(TType.STRUCT, len(self.pulses))
            for pulse in self.pulses:
                pulse.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        pairs = ('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class HBNodes(object):
    """Thrift struct: a list of heartbeat pulse identifiers.

    Attributes:
     - pulseIds: list of pulse id strings (optional)
    """

    def __init__(self, pulseIds=None,):
        self.pulseIds = pulseIds

    def read(self, iprot):
        # Use the accelerated C decoder when the protocol provides one.
        fast = iprot._fast_decode
        if fast is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            fast(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.LIST:
                self.pulseIds = []
                _, list_size = iprot.readListBegin()
                for _ in range(list_size):
                    item = iprot.readString()
                    if sys.version_info[0] == 2:
                        item = item.decode('utf-8')
                    self.pulseIds.append(item)
                iprot.readListEnd()
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Use the accelerated C encoder when the protocol provides one.
        fast = oprot._fast_encode
        if fast is not None and self.thrift_spec is not None:
            oprot.trans.write(fast(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('HBNodes')
        if self.pulseIds is not None:
            oprot.writeFieldBegin('pulseIds', TType.LIST, 1)
            oprot.writeListBegin(TType.STRING, len(self.pulseIds))
            for pulse_id in self.pulseIds:
                out = pulse_id
                if sys.version_info[0] == 2:
                    out = out.encode('utf-8')
                oprot.writeString(out)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        pairs = ('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class HBMessageData(object):
    """Thrift union-style struct: the payload of an HBMessage.

    Exactly one of the attributes is normally populated (all optional here):
     - path: string path
     - pulse: an HBPulse struct
     - boolval: boolean flag
     - records: an HBRecords struct
     - nodes: an HBNodes struct
     - message_blob: opaque binary blob (wire field id 7; id 6 is unused)
    """

    def __init__(self, path=None, pulse=None, boolval=None, records=None, nodes=None, message_blob=None,):
        self.path = path
        self.pulse = pulse
        self.boolval = boolval
        self.records = records
        self.nodes = nodes
        self.message_blob = message_blob

    def read(self, iprot):
        # Use the accelerated C decoder when the protocol provides one.
        fast = iprot._fast_decode
        if fast is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            fast(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                value = iprot.readString()
                if sys.version_info[0] == 2:
                    value = value.decode('utf-8')
                self.path = value
            elif field_id == 2 and field_type == TType.STRUCT:
                self.pulse = HBPulse()
                self.pulse.read(iprot)
            elif field_id == 3 and field_type == TType.BOOL:
                self.boolval = iprot.readBool()
            elif field_id == 4 and field_type == TType.STRUCT:
                self.records = HBRecords()
                self.records.read(iprot)
            elif field_id == 5 and field_type == TType.STRUCT:
                self.nodes = HBNodes()
                self.nodes.read(iprot)
            elif field_id == 7 and field_type == TType.STRING:
                self.message_blob = iprot.readBinary()
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Use the accelerated C encoder when the protocol provides one.
        fast = oprot._fast_encode
        if fast is not None and self.thrift_spec is not None:
            oprot.trans.write(fast(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('HBMessageData')
        if self.path is not None:
            oprot.writeFieldBegin('path', TType.STRING, 1)
            out = self.path
            if sys.version_info[0] == 2:
                out = out.encode('utf-8')
            oprot.writeString(out)
            oprot.writeFieldEnd()
        if self.pulse is not None:
            oprot.writeFieldBegin('pulse', TType.STRUCT, 2)
            self.pulse.write(oprot)
            oprot.writeFieldEnd()
        if self.boolval is not None:
            oprot.writeFieldBegin('boolval', TType.BOOL, 3)
            oprot.writeBool(self.boolval)
            oprot.writeFieldEnd()
        if self.records is not None:
            oprot.writeFieldBegin('records', TType.STRUCT, 4)
            self.records.write(oprot)
            oprot.writeFieldEnd()
        if self.nodes is not None:
            oprot.writeFieldBegin('nodes', TType.STRUCT, 5)
            self.nodes.write(oprot)
            oprot.writeFieldEnd()
        if self.message_blob is not None:
            oprot.writeFieldBegin('message_blob', TType.STRING, 7)
            oprot.writeBinary(self.message_blob)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        pairs = ('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class HBMessage(object):
    """Thrift struct: a heartbeat protocol message.

    Attributes:
     - type: numeric (i32) message type
     - data: an HBMessageData payload
     - message_id: i32 correlation id, defaults to -1
    """

    def __init__(self, type=None, data=None, message_id=-1,):
        self.type = type
        self.data = data
        self.message_id = message_id

    def read(self, iprot):
        # Use the accelerated C decoder when the protocol provides one.
        fast = iprot._fast_decode
        if fast is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            fast(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.I32:
                self.type = iprot.readI32()
            elif field_id == 2 and field_type == TType.STRUCT:
                self.data = HBMessageData()
                self.data.read(iprot)
            elif field_id == 3 and field_type == TType.I32:
                self.message_id = iprot.readI32()
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Use the accelerated C encoder when the protocol provides one.
        fast = oprot._fast_encode
        if fast is not None and self.thrift_spec is not None:
            oprot.trans.write(fast(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('HBMessage')
        if self.type is not None:
            oprot.writeFieldBegin('type', TType.I32, 1)
            oprot.writeI32(self.type)
            oprot.writeFieldEnd()
        if self.data is not None:
            oprot.writeFieldBegin('data', TType.STRUCT, 2)
            self.data.write(oprot)
            oprot.writeFieldEnd()
        if self.message_id is not None:
            oprot.writeFieldBegin('message_id', TType.I32, 3)
            oprot.writeI32(self.message_id)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields.
        return

    def __repr__(self):
        pairs = ('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class HBAuthorizationException(TException):
    """Thrift exception: heartbeat request rejected for authorization reasons.

    Attributes:
     - msg: human-readable failure description (required; see validate())
    """

    def __init__(self, msg=None,):
        self.msg = msg

    def read(self, iprot):
        # Use the accelerated C decoder when the protocol provides one.
        fast = iprot._fast_decode
        if fast is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            fast(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                value = iprot.readString()
                if sys.version_info[0] == 2:
                    value = value.decode('utf-8')
                self.msg = value
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Use the accelerated C encoder when the protocol provides one.
        fast = oprot._fast_encode
        if fast is not None and self.thrift_spec is not None:
            oprot.trans.write(fast(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('HBAuthorizationException')
        if self.msg is not None:
            oprot.writeFieldBegin('msg', TType.STRING, 1)
            out = self.msg
            if sys.version_info[0] == 2:
                out = out.encode('utf-8')
            oprot.writeString(out)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.msg is None:
            raise TProtocolException(message='Required field msg is unset!')
        return

    def __str__(self):
        return repr(self)

    def __repr__(self):
        pairs = ('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class HBExecutionException(TException):
    """Thrift exception: heartbeat request failed during execution.

    Attributes:
     - msg: human-readable failure description (required; see validate())
    """

    def __init__(self, msg=None,):
        self.msg = msg

    def read(self, iprot):
        # Use the accelerated C decoder when the protocol provides one.
        fast = iprot._fast_decode
        if fast is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            fast(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                value = iprot.readString()
                if sys.version_info[0] == 2:
                    value = value.decode('utf-8')
                self.msg = value
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Use the accelerated C encoder when the protocol provides one.
        fast = oprot._fast_encode
        if fast is not None and self.thrift_spec is not None:
            oprot.trans.write(fast(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('HBExecutionException')
        if self.msg is not None:
            oprot.writeFieldBegin('msg', TType.STRING, 1)
            out = self.msg
            if sys.version_info[0] == 2:
                out = out.encode('utf-8')
            oprot.writeString(out)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.msg is None:
            raise TProtocolException(message='Required field msg is unset!')
        return

    def __str__(self):
        return repr(self)

    def __repr__(self):
        pairs = ('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class WorkerTokenInfo(object):
    """Thrift struct: identifying information inside a worker token.

    All four attributes are required (see validate()):
     - userName: owning user name
     - topologyId: topology the token is scoped to
     - secretVersion: i64 version of the signing secret
     - expirationTimeMillis: i64 expiration timestamp in milliseconds
    """

    def __init__(self, userName=None, topologyId=None, secretVersion=None, expirationTimeMillis=None,):
        self.userName = userName
        self.topologyId = topologyId
        self.secretVersion = secretVersion
        self.expirationTimeMillis = expirationTimeMillis

    def read(self, iprot):
        # Use the accelerated C decoder when the protocol provides one.
        fast = iprot._fast_decode
        if fast is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            fast(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                value = iprot.readString()
                if sys.version_info[0] == 2:
                    value = value.decode('utf-8')
                self.userName = value
            elif field_id == 2 and field_type == TType.STRING:
                value = iprot.readString()
                if sys.version_info[0] == 2:
                    value = value.decode('utf-8')
                self.topologyId = value
            elif field_id == 3 and field_type == TType.I64:
                self.secretVersion = iprot.readI64()
            elif field_id == 4 and field_type == TType.I64:
                self.expirationTimeMillis = iprot.readI64()
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Use the accelerated C encoder when the protocol provides one.
        fast = oprot._fast_encode
        if fast is not None and self.thrift_spec is not None:
            oprot.trans.write(fast(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('WorkerTokenInfo')
        if self.userName is not None:
            oprot.writeFieldBegin('userName', TType.STRING, 1)
            out = self.userName
            if sys.version_info[0] == 2:
                out = out.encode('utf-8')
            oprot.writeString(out)
            oprot.writeFieldEnd()
        if self.topologyId is not None:
            oprot.writeFieldBegin('topologyId', TType.STRING, 2)
            out = self.topologyId
            if sys.version_info[0] == 2:
                out = out.encode('utf-8')
            oprot.writeString(out)
            oprot.writeFieldEnd()
        if self.secretVersion is not None:
            oprot.writeFieldBegin('secretVersion', TType.I64, 3)
            oprot.writeI64(self.secretVersion)
            oprot.writeFieldEnd()
        if self.expirationTimeMillis is not None:
            oprot.writeFieldBegin('expirationTimeMillis', TType.I64, 4)
            oprot.writeI64(self.expirationTimeMillis)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Every field is required; the message text matches the generated form.
        for name in ('userName', 'topologyId', 'secretVersion', 'expirationTimeMillis'):
            if getattr(self, name) is None:
                raise TProtocolException(message='Required field %s is unset!' % name)
        return

    def __repr__(self):
        pairs = ('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class WorkerToken(object):
    """Thrift struct: a signed worker token.

    All three attributes are required (see validate()):
     - serviceType: numeric (i32) service category
     - info: serialized WorkerTokenInfo bytes
     - signature: binary signature over the info bytes
    """

    def __init__(self, serviceType=None, info=None, signature=None,):
        self.serviceType = serviceType
        self.info = info
        self.signature = signature

    def read(self, iprot):
        # Use the accelerated C decoder when the protocol provides one.
        fast = iprot._fast_decode
        if fast is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            fast(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.I32:
                self.serviceType = iprot.readI32()
            elif field_id == 2 and field_type == TType.STRING:
                self.info = iprot.readBinary()
            elif field_id == 3 and field_type == TType.STRING:
                self.signature = iprot.readBinary()
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Use the accelerated C encoder when the protocol provides one.
        fast = oprot._fast_encode
        if fast is not None and self.thrift_spec is not None:
            oprot.trans.write(fast(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('WorkerToken')
        if self.serviceType is not None:
            oprot.writeFieldBegin('serviceType', TType.I32, 1)
            oprot.writeI32(self.serviceType)
            oprot.writeFieldEnd()
        if self.info is not None:
            oprot.writeFieldBegin('info', TType.STRING, 2)
            oprot.writeBinary(self.info)
            oprot.writeFieldEnd()
        if self.signature is not None:
            oprot.writeFieldBegin('signature', TType.STRING, 3)
            oprot.writeBinary(self.signature)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Every field is required; the message text matches the generated form.
        for name in ('serviceType', 'info', 'signature'):
            if getattr(self, name) is None:
                raise TProtocolException(message='Required field %s is unset!' % name)
        return

    def __repr__(self):
        pairs = ('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class PrivateWorkerKey(object):
    """Thrift struct: a private key used to sign worker tokens.

    All three attributes are required (see validate()):
     - key: binary key material
     - userName: owning user name
     - expirationTimeMillis: i64 expiration timestamp in milliseconds
    """

    def __init__(self, key=None, userName=None, expirationTimeMillis=None,):
        self.key = key
        self.userName = userName
        self.expirationTimeMillis = expirationTimeMillis

    def read(self, iprot):
        # Use the accelerated C decoder when the protocol provides one.
        fast = iprot._fast_decode
        if fast is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            fast(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                self.key = iprot.readBinary()
            elif field_id == 2 and field_type == TType.STRING:
                value = iprot.readString()
                if sys.version_info[0] == 2:
                    value = value.decode('utf-8')
                self.userName = value
            elif field_id == 3 and field_type == TType.I64:
                self.expirationTimeMillis = iprot.readI64()
            else:
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Use the accelerated C encoder when the protocol provides one.
        fast = oprot._fast_encode
        if fast is not None and self.thrift_spec is not None:
            oprot.trans.write(fast(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('PrivateWorkerKey')
        if self.key is not None:
            oprot.writeFieldBegin('key', TType.STRING, 1)
            oprot.writeBinary(self.key)
            oprot.writeFieldEnd()
        if self.userName is not None:
            oprot.writeFieldBegin('userName', TType.STRING, 2)
            out = self.userName
            if sys.version_info[0] == 2:
                out = out.encode('utf-8')
            oprot.writeString(out)
            oprot.writeFieldEnd()
        if self.expirationTimeMillis is not None:
            oprot.writeFieldBegin('expirationTimeMillis', TType.I64, 3)
            oprot.writeI64(self.expirationTimeMillis)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Every field is required; the message text matches the generated form.
        for name in ('key', 'userName', 'expirationTimeMillis'):
            if getattr(self, name) is None:
                raise TProtocolException(message='Required field %s is unset!' % name)
        return

    def __repr__(self):
        pairs = ('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
# thrift_spec tables: one tuple per struct, indexed by thrift field id and
# consumed by the accelerated (fastbinary) codec used in each class's
# read()/write() fast path. Each non-None entry is
# (field id, TType, field name, type args or string encoding, default value);
# a None entry marks an unused field id slot (index 0 is always unused).
all_structs.append(JavaObjectArg)
JavaObjectArg.thrift_spec = (
None, # 0
(1, TType.I32, 'int_arg', None, None, ), # 1
(2, TType.I64, 'long_arg', None, None, ), # 2
(3, TType.STRING, 'string_arg', 'UTF8', None, ), # 3
(4, TType.BOOL, 'bool_arg', None, None, ), # 4
(5, TType.STRING, 'binary_arg', 'BINARY', None, ), # 5
(6, TType.DOUBLE, 'double_arg', None, None, ), # 6
)
all_structs.append(JavaObject)
JavaObject.thrift_spec = (
None, # 0
(1, TType.STRING, 'full_class_name', 'UTF8', None, ), # 1
(2, TType.LIST, 'args_list', (TType.STRUCT, [JavaObjectArg, None], False), None, ), # 2
)
all_structs.append(NullStruct)
NullStruct.thrift_spec = (
)
all_structs.append(GlobalStreamId)
GlobalStreamId.thrift_spec = (
None, # 0
(1, TType.STRING, 'componentId', 'UTF8', None, ), # 1
(2, TType.STRING, 'streamId', 'UTF8', None, ), # 2
)
all_structs.append(Grouping)
Grouping.thrift_spec = (
None, # 0
(1, TType.LIST, 'fields', (TType.STRING, 'UTF8', False), None, ), # 1
(2, TType.STRUCT, 'shuffle', [NullStruct, None], None, ), # 2
(3, TType.STRUCT, 'all', [NullStruct, None], None, ), # 3
(4, TType.STRUCT, 'none', [NullStruct, None], None, ), # 4
(5, TType.STRUCT, 'direct', [NullStruct, None], None, ), # 5
(6, TType.STRUCT, 'custom_object', [JavaObject, None], None, ), # 6
(7, TType.STRING, 'custom_serialized', 'BINARY', None, ), # 7
(8, TType.STRUCT, 'local_or_shuffle', [NullStruct, None], None, ), # 8
)
all_structs.append(StreamInfo)
StreamInfo.thrift_spec = (
None, # 0
(1, TType.LIST, 'output_fields', (TType.STRING, 'UTF8', False), None, ), # 1
(2, TType.BOOL, 'direct', None, None, ), # 2
)
all_structs.append(ShellComponent)
ShellComponent.thrift_spec = (
None, # 0
(1, TType.STRING, 'execution_command', 'UTF8', None, ), # 1
(2, TType.STRING, 'script', 'UTF8', None, ), # 2
)
all_structs.append(ComponentObject)
ComponentObject.thrift_spec = (
None, # 0
(1, TType.STRING, 'serialized_java', 'BINARY', None, ), # 1
(2, TType.STRUCT, 'shell', [ShellComponent, None], None, ), # 2
(3, TType.STRUCT, 'java_object', [JavaObject, None], None, ), # 3
)
all_structs.append(ComponentCommon)
ComponentCommon.thrift_spec = (
None, # 0
(1, TType.MAP, 'inputs', (TType.STRUCT, [GlobalStreamId, None], TType.STRUCT, [Grouping, None], False), None, ), # 1
(2, TType.MAP, 'streams', (TType.STRING, 'UTF8', TType.STRUCT, [StreamInfo, None], False), None, ), # 2
(3, TType.I32, 'parallelism_hint', None, None, ), # 3
(4, TType.STRING, 'json_conf', 'UTF8', None, ), # 4
)
all_structs.append(SpoutSpec)
SpoutSpec.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'spout_object', [ComponentObject, None], None, ), # 1
(2, TType.STRUCT, 'common', [ComponentCommon, None], None, ), # 2
)
all_structs.append(Bolt)
Bolt.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'bolt_object', [ComponentObject, None], None, ), # 1
(2, TType.STRUCT, 'common', [ComponentCommon, None], None, ), # 2
)
all_structs.append(StateSpoutSpec)
StateSpoutSpec.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'state_spout_object', [ComponentObject, None], None, ), # 1
(2, TType.STRUCT, 'common', [ComponentCommon, None], None, ), # 2
)
all_structs.append(SharedMemory)
SharedMemory.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
(2, TType.DOUBLE, 'on_heap', None, None, ), # 2
(3, TType.DOUBLE, 'off_heap_worker', None, None, ), # 3
(4, TType.DOUBLE, 'off_heap_node', None, None, ), # 4
)
all_structs.append(StormTopology)
StormTopology.thrift_spec = (
None, # 0
(1, TType.MAP, 'spouts', (TType.STRING, 'UTF8', TType.STRUCT, [SpoutSpec, None], False), None, ), # 1
(2, TType.MAP, 'bolts', (TType.STRING, 'UTF8', TType.STRUCT, [Bolt, None], False), None, ), # 2
(3, TType.MAP, 'state_spouts', (TType.STRING, 'UTF8', TType.STRUCT, [StateSpoutSpec, None], False), None, ), # 3
(4, TType.LIST, 'worker_hooks', (TType.STRING, 'BINARY', False), None, ), # 4
(5, TType.LIST, 'dependency_jars', (TType.STRING, 'UTF8', False), None, ), # 5
(6, TType.LIST, 'dependency_artifacts', (TType.STRING, 'UTF8', False), None, ), # 6
(7, TType.STRING, 'storm_version', 'UTF8', None, ), # 7
(8, TType.STRING, 'jdk_version', 'UTF8', None, ), # 8
(9, TType.MAP, 'component_to_shared_memory', (TType.STRING, 'UTF8', TType.SET, (TType.STRING, 'UTF8', False), False), None, ), # 9
(10, TType.MAP, 'shared_memory', (TType.STRING, 'UTF8', TType.STRUCT, [SharedMemory, None], False), None, ), # 10
)
all_structs.append(AlreadyAliveException)
AlreadyAliveException.thrift_spec = (
None, # 0
(1, TType.STRING, 'msg', 'UTF8', None, ), # 1
)
all_structs.append(NotAliveException)
NotAliveException.thrift_spec = (
None, # 0
(1, TType.STRING, 'msg', 'UTF8', None, ), # 1
)
all_structs.append(AuthorizationException)
AuthorizationException.thrift_spec = (
None, # 0
(1, TType.STRING, 'msg', 'UTF8', None, ), # 1
)
all_structs.append(InvalidTopologyException)
InvalidTopologyException.thrift_spec = (
None, # 0
(1, TType.STRING, 'msg', 'UTF8', None, ), # 1
)
all_structs.append(KeyNotFoundException)
KeyNotFoundException.thrift_spec = (
None, # 0
(1, TType.STRING, 'msg', 'UTF8', None, ), # 1
)
all_structs.append(IllegalStateException)
IllegalStateException.thrift_spec = (
None, # 0
(1, TType.STRING, 'msg', 'UTF8', None, ), # 1
)
all_structs.append(KeyAlreadyExistsException)
KeyAlreadyExistsException.thrift_spec = (
None, # 0
(1, TType.STRING, 'msg', 'UTF8', None, ), # 1
)
all_structs.append(TopologySummary)
# TopologySummary's spec tuple is indexed by thrift field id. Field ids
# 10-512 and 516-520 are unused, so instead of listing ~508 literal `None`
# lines we splice in `(None,) * k` runs; the resulting tuple is element-for-
# element identical to the fully expanded literal, which is all the
# accelerated codec cares about.
TopologySummary.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'id', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'name', 'UTF8', None, ),  # 2
    (3, TType.I32, 'num_tasks', None, None, ),  # 3
    (4, TType.I32, 'num_executors', None, None, ),  # 4
    (5, TType.I32, 'num_workers', None, None, ),  # 5
    (6, TType.I32, 'uptime_secs', None, None, ),  # 6
    (7, TType.STRING, 'status', 'UTF8', None, ),  # 7
    (8, TType.STRING, 'storm_version', 'UTF8', None, ),  # 8
    (9, TType.STRING, 'topology_version', 'UTF8', None, ),  # 9
) + (None,) * 503 + (  # field ids 10-512 unused
    (513, TType.STRING, 'sched_status', 'UTF8', None, ),  # 513
    (514, TType.STRING, 'owner', 'UTF8', None, ),  # 514
    (515, TType.I32, 'replication_count', None, None, ),  # 515
) + (None,) * 5 + (  # field ids 516-520 unused
    (521, TType.DOUBLE, 'requested_memonheap', None, None, ),  # 521
    (522, TType.DOUBLE, 'requested_memoffheap', None, None, ),  # 522
    (523, TType.DOUBLE, 'requested_cpu', None, None, ),  # 523
    (524, TType.DOUBLE, 'assigned_memonheap', None, None, ),  # 524
    (525, TType.DOUBLE, 'assigned_memoffheap', None, None, ),  # 525
    (526, TType.DOUBLE, 'assigned_cpu', None, None, ),  # 526
)
# Field spec for SupervisorSummary: each entry is
# (field id, wire type, field name, type args, default), indexed by field id.
SupervisorSummary.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.STRING, 'host', 'UTF8', None),
    (2, TType.I32, 'uptime_secs', None, None),
    (3, TType.I32, 'num_workers', None, None),
    (4, TType.I32, 'num_used_workers', None, None),
    (5, TType.STRING, 'supervisor_id', 'UTF8', None),
    (6, TType.STRING, 'version', 'UTF8', "VERSION_NOT_PROVIDED"),
    (7, TType.MAP, 'total_resources', (TType.STRING, 'UTF8', TType.DOUBLE, None, False), None),
    (8, TType.DOUBLE, 'used_mem', None, None),
    (9, TType.DOUBLE, 'used_cpu', None, None),
    (10, TType.DOUBLE, 'fragmented_mem', None, None),
    (11, TType.DOUBLE, 'fragmented_cpu', None, None),
    (12, TType.BOOL, 'blacklisted', None, None),
)
all_structs.append(SupervisorSummary)
# Field spec for NimbusSummary, indexed by thrift field id.
NimbusSummary.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.STRING, 'host', 'UTF8', None),
    (2, TType.I32, 'port', None, None),
    (3, TType.I32, 'uptime_secs', None, None),
    (4, TType.BOOL, 'isLeader', None, None),
    (5, TType.STRING, 'version', 'UTF8', None),
)
all_structs.append(NimbusSummary)
# Field spec for ClusterSummary; id 2 was removed from the IDL, hence the gap.
ClusterSummary.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.LIST, 'supervisors', (TType.STRUCT, [SupervisorSummary, None], False), None),
    None,  # id 2 unused
    (3, TType.LIST, 'topologies', (TType.STRUCT, [TopologySummary, None], False), None),
    (4, TType.LIST, 'nimbuses', (TType.STRUCT, [NimbusSummary, None], False), None),
)
all_structs.append(ClusterSummary)
# Field spec for ErrorInfo, indexed by thrift field id.
ErrorInfo.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.STRING, 'error', 'UTF8', None),
    (2, TType.I32, 'error_time_secs', None, None),
    (3, TType.STRING, 'host', 'UTF8', None),
    (4, TType.I32, 'port', None, None),
)
all_structs.append(ErrorInfo)
# Field spec for BoltStats: every field is a map keyed by time window string,
# whose values are maps from GlobalStreamId to a counter or average.
BoltStats.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.MAP, 'acked', (TType.STRING, 'UTF8', TType.MAP, (TType.STRUCT, [GlobalStreamId, None], TType.I64, None, False), False), None),
    (2, TType.MAP, 'failed', (TType.STRING, 'UTF8', TType.MAP, (TType.STRUCT, [GlobalStreamId, None], TType.I64, None, False), False), None),
    (3, TType.MAP, 'process_ms_avg', (TType.STRING, 'UTF8', TType.MAP, (TType.STRUCT, [GlobalStreamId, None], TType.DOUBLE, None, False), False), None),
    (4, TType.MAP, 'executed', (TType.STRING, 'UTF8', TType.MAP, (TType.STRUCT, [GlobalStreamId, None], TType.I64, None, False), False), None),
    (5, TType.MAP, 'execute_ms_avg', (TType.STRING, 'UTF8', TType.MAP, (TType.STRUCT, [GlobalStreamId, None], TType.DOUBLE, None, False), False), None),
)
all_structs.append(BoltStats)
# Field spec for SpoutStats: window -> (stream id -> counter/average) maps.
SpoutStats.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.MAP, 'acked', (TType.STRING, 'UTF8', TType.MAP, (TType.STRING, 'UTF8', TType.I64, None, False), False), None),
    (2, TType.MAP, 'failed', (TType.STRING, 'UTF8', TType.MAP, (TType.STRING, 'UTF8', TType.I64, None, False), False), None),
    (3, TType.MAP, 'complete_ms_avg', (TType.STRING, 'UTF8', TType.MAP, (TType.STRING, 'UTF8', TType.DOUBLE, None, False), False), None),
)
all_structs.append(SpoutStats)
# Field spec for ExecutorSpecificStats (a thrift union: bolt or spout stats).
ExecutorSpecificStats.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.STRUCT, 'bolt', [BoltStats, None], None),
    (2, TType.STRUCT, 'spout', [SpoutStats, None], None),
)
all_structs.append(ExecutorSpecificStats)
# Field spec for ExecutorStats, indexed by thrift field id.
ExecutorStats.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.MAP, 'emitted', (TType.STRING, 'UTF8', TType.MAP, (TType.STRING, 'UTF8', TType.I64, None, False), False), None),
    (2, TType.MAP, 'transferred', (TType.STRING, 'UTF8', TType.MAP, (TType.STRING, 'UTF8', TType.I64, None, False), False), None),
    (3, TType.STRUCT, 'specific', [ExecutorSpecificStats, None], None),
    (4, TType.DOUBLE, 'rate', None, None),
)
all_structs.append(ExecutorStats)
# Field spec for ExecutorInfo (task id range assigned to an executor).
ExecutorInfo.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.I32, 'task_start', None, None),
    (2, TType.I32, 'task_end', None, None),
)
all_structs.append(ExecutorInfo)
# Field spec for ExecutorSummary; id 6 was removed from the IDL, hence the gap.
ExecutorSummary.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.STRUCT, 'executor_info', [ExecutorInfo, None], None),
    (2, TType.STRING, 'component_id', 'UTF8', None),
    (3, TType.STRING, 'host', 'UTF8', None),
    (4, TType.I32, 'port', None, None),
    (5, TType.I32, 'uptime_secs', None, None),
    None,  # id 6 unused
    (7, TType.STRUCT, 'stats', [ExecutorStats, None], None),
)
all_structs.append(ExecutorSummary)
# Field spec for DebugOptions, indexed by thrift field id.
DebugOptions.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.BOOL, 'enable', None, None),
    (2, TType.DOUBLE, 'samplingpct', None, None),
)
all_structs.append(DebugOptions)
# Field spec for TopologyInfo.  thrift_spec is indexed by field id, so the
# large unused id ranges (9-512 and 516-520) are filled with None padding via
# tuple repetition rather than one literal None per line; the resulting tuple
# is element-for-element identical to the fully spelled-out form.
TopologyInfo.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.STRING, 'id', 'UTF8', None),
    (2, TType.STRING, 'name', 'UTF8', None),
    (3, TType.I32, 'uptime_secs', None, None),
    (4, TType.LIST, 'executors', (TType.STRUCT, [ExecutorSummary, None], False), None),
    (5, TType.STRING, 'status', 'UTF8', None),
    (6, TType.MAP, 'errors', (TType.STRING, 'UTF8', TType.LIST, (TType.STRUCT, [ErrorInfo, None], False), False), None),
    (7, TType.MAP, 'component_debug', (TType.STRING, 'UTF8', TType.STRUCT, [DebugOptions, None], False), None),
    (8, TType.STRING, 'storm_version', 'UTF8', None),
) + (None,) * 504 + (  # ids 9-512 unused
    (513, TType.STRING, 'sched_status', 'UTF8', None),
    (514, TType.STRING, 'owner', 'UTF8', None),
    (515, TType.I32, 'replication_count', None, None),
) + (None,) * 5 + (  # ids 516-520 unused
    (521, TType.DOUBLE, 'requested_memonheap', None, None),
    (522, TType.DOUBLE, 'requested_memoffheap', None, None),
    (523, TType.DOUBLE, 'requested_cpu', None, None),
    (524, TType.DOUBLE, 'assigned_memonheap', None, None),
    (525, TType.DOUBLE, 'assigned_memoffheap', None, None),
    (526, TType.DOUBLE, 'assigned_cpu', None, None),
)
all_structs.append(TopologyInfo)
# Field spec for CommonAggregateStats, indexed by thrift field id.
CommonAggregateStats.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.I32, 'num_executors', None, None),
    (2, TType.I32, 'num_tasks', None, None),
    (3, TType.I64, 'emitted', None, None),
    (4, TType.I64, 'transferred', None, None),
    (5, TType.I64, 'acked', None, None),
    (6, TType.I64, 'failed', None, None),
    (7, TType.MAP, 'resources_map', (TType.STRING, 'UTF8', TType.DOUBLE, None, False), None),
)
all_structs.append(CommonAggregateStats)
# Field spec for SpoutAggregateStats, indexed by thrift field id.
SpoutAggregateStats.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.DOUBLE, 'complete_latency_ms', None, None),
)
all_structs.append(SpoutAggregateStats)
# Field spec for BoltAggregateStats, indexed by thrift field id.
BoltAggregateStats.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.DOUBLE, 'execute_latency_ms', None, None),
    (2, TType.DOUBLE, 'process_latency_ms', None, None),
    (3, TType.I64, 'executed', None, None),
    (4, TType.DOUBLE, 'capacity', None, None),
)
all_structs.append(BoltAggregateStats)
# Field spec for SpecificAggregateStats (a thrift union: bolt or spout stats).
SpecificAggregateStats.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.STRUCT, 'bolt', [BoltAggregateStats, None], None),
    (2, TType.STRUCT, 'spout', [SpoutAggregateStats, None], None),
)
all_structs.append(SpecificAggregateStats)
# Field spec for ComponentAggregateStats, indexed by thrift field id.
ComponentAggregateStats.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.I32, 'type', None, None),
    (2, TType.STRUCT, 'common_stats', [CommonAggregateStats, None], None),
    (3, TType.STRUCT, 'specific_stats', [SpecificAggregateStats, None], None),
    (4, TType.STRUCT, 'last_error', [ErrorInfo, None], None),
)
all_structs.append(ComponentAggregateStats)
# Field spec for TopologyStats: per-time-window aggregate maps.
TopologyStats.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.MAP, 'window_to_emitted', (TType.STRING, 'UTF8', TType.I64, None, False), None),
    (2, TType.MAP, 'window_to_transferred', (TType.STRING, 'UTF8', TType.I64, None, False), None),
    (3, TType.MAP, 'window_to_complete_latencies_ms', (TType.STRING, 'UTF8', TType.DOUBLE, None, False), None),
    (4, TType.MAP, 'window_to_acked', (TType.STRING, 'UTF8', TType.I64, None, False), None),
    (5, TType.MAP, 'window_to_failed', (TType.STRING, 'UTF8', TType.I64, None, False), None),
)
all_structs.append(TopologyStats)
# Field spec for WorkerSummary.  The unused id range 10-520 is filled with
# None padding via tuple repetition; the resulting tuple is identical to the
# fully spelled-out form.
WorkerSummary.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.STRING, 'supervisor_id', 'UTF8', None),
    (2, TType.STRING, 'host', 'UTF8', None),
    (3, TType.I32, 'port', None, None),
    (4, TType.STRING, 'topology_id', 'UTF8', None),
    (5, TType.STRING, 'topology_name', 'UTF8', None),
    (6, TType.I32, 'num_executors', None, None),
    (7, TType.MAP, 'component_to_num_tasks', (TType.STRING, 'UTF8', TType.I64, None, False), None),
    (8, TType.I32, 'time_secs', None, None),
    (9, TType.I32, 'uptime_secs', None, None),
) + (None,) * 511 + (  # ids 10-520 unused
    (521, TType.DOUBLE, 'requested_memonheap', None, None),
    (522, TType.DOUBLE, 'requested_memoffheap', None, None),
    (523, TType.DOUBLE, 'requested_cpu', None, None),
    (524, TType.DOUBLE, 'assigned_memonheap', None, None),
    (525, TType.DOUBLE, 'assigned_memoffheap', None, None),
    (526, TType.DOUBLE, 'assigned_cpu', None, None),
    (527, TType.STRING, 'owner', 'UTF8', None),
)
all_structs.append(WorkerSummary)
# Field spec for SupervisorPageInfo, indexed by thrift field id.
SupervisorPageInfo.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.LIST, 'supervisor_summaries', (TType.STRUCT, [SupervisorSummary, None], False), None),
    (2, TType.LIST, 'worker_summaries', (TType.STRUCT, [WorkerSummary, None], False), None),
)
all_structs.append(SupervisorPageInfo)
# Field spec for TopologyPageInfo.  The unused id range 19-520 is filled with
# None padding via tuple repetition; the resulting tuple is identical to the
# fully spelled-out form.
TopologyPageInfo.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.STRING, 'id', 'UTF8', None),
    (2, TType.STRING, 'name', 'UTF8', None),
    (3, TType.I32, 'uptime_secs', None, None),
    (4, TType.STRING, 'status', 'UTF8', None),
    (5, TType.I32, 'num_tasks', None, None),
    (6, TType.I32, 'num_workers', None, None),
    (7, TType.I32, 'num_executors', None, None),
    (8, TType.STRING, 'topology_conf', 'UTF8', None),
    (9, TType.MAP, 'id_to_spout_agg_stats', (TType.STRING, 'UTF8', TType.STRUCT, [ComponentAggregateStats, None], False), None),
    (10, TType.MAP, 'id_to_bolt_agg_stats', (TType.STRING, 'UTF8', TType.STRUCT, [ComponentAggregateStats, None], False), None),
    (11, TType.STRING, 'sched_status', 'UTF8', None),
    (12, TType.STRUCT, 'topology_stats', [TopologyStats, None], None),
    (13, TType.STRING, 'owner', 'UTF8', None),
    (14, TType.STRUCT, 'debug_options', [DebugOptions, None], None),
    (15, TType.I32, 'replication_count', None, None),
    (16, TType.LIST, 'workers', (TType.STRUCT, [WorkerSummary, None], False), None),
    (17, TType.STRING, 'storm_version', 'UTF8', None),
    (18, TType.STRING, 'topology_version', 'UTF8', None),
) + (None,) * 502 + (  # ids 19-520 unused
    (521, TType.DOUBLE, 'requested_memonheap', None, None),
    (522, TType.DOUBLE, 'requested_memoffheap', None, None),
    (523, TType.DOUBLE, 'requested_cpu', None, None),
    (524, TType.DOUBLE, 'assigned_memonheap', None, None),
    (525, TType.DOUBLE, 'assigned_memoffheap', None, None),
    (526, TType.DOUBLE, 'assigned_cpu', None, None),
    (527, TType.DOUBLE, 'requested_regular_on_heap_memory', None, None),
    (528, TType.DOUBLE, 'requested_shared_on_heap_memory', None, None),
    (529, TType.DOUBLE, 'requested_regular_off_heap_memory', None, None),
    (530, TType.DOUBLE, 'requested_shared_off_heap_memory', None, None),
    (531, TType.DOUBLE, 'assigned_regular_on_heap_memory', None, None),
    (532, TType.DOUBLE, 'assigned_shared_on_heap_memory', None, None),
    (533, TType.DOUBLE, 'assigned_regular_off_heap_memory', None, None),
    (534, TType.DOUBLE, 'assigned_shared_off_heap_memory', None, None),
)
all_structs.append(TopologyPageInfo)
# Field spec for ExecutorAggregateStats, indexed by thrift field id.
ExecutorAggregateStats.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.STRUCT, 'exec_summary', [ExecutorSummary, None], None),
    (2, TType.STRUCT, 'stats', [ComponentAggregateStats, None], None),
)
all_structs.append(ExecutorAggregateStats)
# Field spec for ComponentPageInfo, indexed by thrift field id.
ComponentPageInfo.thrift_spec = (
    None,  # field ids start at 1
    (1, TType.STRING, 'component_id', 'UTF8', None),
    (2, TType.I32, 'component_type', None, None),
    (3, TType.STRING, 'topology_id', 'UTF8', None),
    (4, TType.STRING, 'topology_name', 'UTF8', None),
    (5, TType.I32, 'num_executors', None, None),
    (6, TType.I32, 'num_tasks', None, None),
    (7, TType.MAP, 'window_to_stats', (TType.STRING, 'UTF8', TType.STRUCT, [ComponentAggregateStats, None], False), None),
    (8, TType.MAP, 'gsid_to_input_stats', (TType.STRUCT, [GlobalStreamId, None], TType.STRUCT, [ComponentAggregateStats, None], False), None),
    (9, TType.MAP, 'sid_to_output_stats', (TType.STRING, 'UTF8', TType.STRUCT, [ComponentAggregateStats, None], False), None),
    (10, TType.LIST, 'exec_stats', (TType.STRUCT, [ExecutorAggregateStats, None], False), None),
    (11, TType.LIST, 'errors', (TType.STRUCT, [ErrorInfo, None], False), None),
    (12, TType.STRING, 'eventlog_host', 'UTF8', None),
    (13, TType.I32, 'eventlog_port', None, None),
    (14, TType.STRUCT, 'debug_options', [DebugOptions, None], None),
    (15, TType.STRING, 'topology_status', 'UTF8', None),
    (16, TType.MAP, 'resources_map', (TType.STRING, 'UTF8', TType.DOUBLE, None, False), None),
)
all_structs.append(ComponentPageInfo)
all_structs.append(KillOptions)
KillOptions.thrift_spec = (
None, # 0
(1, TType.I32, 'wait_secs', None, None, ), # 1
)
all_structs.append(RebalanceOptions)
RebalanceOptions.thrift_spec = (
None, # 0
(1, TType.I32, 'wait_secs', None, None, ), # 1
(2, TType.I32, 'num_workers', None, None, ), # 2
(3, TType.MAP, 'num_executors', (TType.STRING, 'UTF8', TType.I32, None, False), None, ), # 3
(4, TType.MAP, 'topology_resources_overrides', (TType.STRING, 'UTF8', TType.MAP, (TType.STRING, 'UTF8', TType.DOUBLE, None, False), False), None, ), # 4
(5, TType.STRING, 'topology_conf_overrides', 'UTF8', None, ), # 5
(6, TType.STRING, 'principal', 'UTF8', None, ), # 6
)
all_structs.append(Credentials)
Credentials.thrift_spec = (
None, # 0
(1, TType.MAP, 'creds', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 1
(2, TType.STRING, 'topoOwner', 'UTF8', None, ), # 2
)
all_structs.append(SubmitOptions)
SubmitOptions.thrift_spec = (
None, # 0
(1, TType.I32, 'initial_status', None, None, ), # 1
(2, TType.STRUCT, 'creds', [Credentials, None], None, ), # 2
)
all_structs.append(AccessControl)
AccessControl.thrift_spec = (
None, # 0
(1, TType.I32, 'type', None, None, ), # 1
(2, TType.STRING, 'name', 'UTF8', None, ), # 2
(3, TType.I32, 'access', None, None, ), # 3
)
all_structs.append(SettableBlobMeta)
SettableBlobMeta.thrift_spec = (
None, # 0
(1, TType.LIST, 'acl', (TType.STRUCT, [AccessControl, None], False), None, ), # 1
(2, TType.I32, 'replication_factor', None, None, ), # 2
)
all_structs.append(ReadableBlobMeta)
ReadableBlobMeta.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'settable', [SettableBlobMeta, None], None, ), # 1
(2, TType.I64, 'version', None, None, ), # 2
)
all_structs.append(ListBlobsResult)
ListBlobsResult.thrift_spec = (
None, # 0
(1, TType.LIST, 'keys', (TType.STRING, 'UTF8', False), None, ), # 1
(2, TType.STRING, 'session', 'UTF8', None, ), # 2
)
all_structs.append(BeginDownloadResult)
BeginDownloadResult.thrift_spec = (
None, # 0
(1, TType.I64, 'version', None, None, ), # 1
(2, TType.STRING, 'session', 'UTF8', None, ), # 2
(3, TType.I64, 'data_size', None, None, ), # 3
)
all_structs.append(SupervisorInfo)
SupervisorInfo.thrift_spec = (
None, # 0
(1, TType.I64, 'time_secs', None, None, ), # 1
(2, TType.STRING, 'hostname', 'UTF8', None, ), # 2
(3, TType.STRING, 'assignment_id', 'UTF8', None, ), # 3
(4, TType.LIST, 'used_ports', (TType.I64, None, False), None, ), # 4
(5, TType.LIST, 'meta', (TType.I64, None, False), None, ), # 5
(6, TType.MAP, 'scheduler_meta', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 6
(7, TType.I64, 'uptime_secs', None, None, ), # 7
(8, TType.STRING, 'version', 'UTF8', None, ), # 8
(9, TType.MAP, 'resources_map', (TType.STRING, 'UTF8', TType.DOUBLE, None, False), None, ), # 9
(10, TType.I32, 'server_port', None, None, ), # 10
)
all_structs.append(NodeInfo)
NodeInfo.thrift_spec = (
None, # 0
(1, TType.STRING, 'node', 'UTF8', None, ), # 1
(2, TType.SET, 'port', (TType.I64, None, False), None, ), # 2
)
all_structs.append(WorkerResources)
WorkerResources.thrift_spec = (
None, # 0
(1, TType.DOUBLE, 'mem_on_heap', None, None, ), # 1
(2, TType.DOUBLE, 'mem_off_heap', None, None, ), # 2
(3, TType.DOUBLE, 'cpu', None, None, ), # 3
(4, TType.DOUBLE, 'shared_mem_on_heap', None, None, ), # 4
(5, TType.DOUBLE, 'shared_mem_off_heap', None, None, ), # 5
(6, TType.MAP, 'resources', (TType.STRING, 'UTF8', TType.DOUBLE, None, False), None, ), # 6
(7, TType.MAP, 'shared_resources', (TType.STRING, 'UTF8', TType.DOUBLE, None, False), None, ), # 7
)
all_structs.append(Assignment)
Assignment.thrift_spec = (
None, # 0
(1, TType.STRING, 'master_code_dir', 'UTF8', None, ), # 1
(2, TType.MAP, 'node_host', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), {
}, ), # 2
(3, TType.MAP, 'executor_node_port', (TType.LIST, (TType.I64, None, False), TType.STRUCT, [NodeInfo, None], False), {
}, ), # 3
(4, TType.MAP, 'executor_start_time_secs', (TType.LIST, (TType.I64, None, False), TType.I64, None, False), {
}, ), # 4
(5, TType.MAP, 'worker_resources', (TType.STRUCT, [NodeInfo, None], TType.STRUCT, [WorkerResources, None], False), {
}, ), # 5
(6, TType.MAP, 'total_shared_off_heap', (TType.STRING, 'UTF8', TType.DOUBLE, None, False), {
}, ), # 6
(7, TType.STRING, 'owner', 'UTF8', None, ), # 7
)
all_structs.append(TopologyActionOptions)
TopologyActionOptions.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'kill_options', [KillOptions, None], None, ), # 1
(2, TType.STRUCT, 'rebalance_options', [RebalanceOptions, None], None, ), # 2
)
all_structs.append(StormBase)
StormBase.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
(2, TType.I32, 'status', None, None, ), # 2
(3, TType.I32, 'num_workers', None, None, ), # 3
(4, TType.MAP, 'component_executors', (TType.STRING, 'UTF8', TType.I32, None, False), None, ), # 4
(5, TType.I32, 'launch_time_secs', None, None, ), # 5
(6, TType.STRING, 'owner', 'UTF8', None, ), # 6
(7, TType.STRUCT, 'topology_action_options', [TopologyActionOptions, None], None, ), # 7
(8, TType.I32, 'prev_status', None, None, ), # 8
(9, TType.MAP, 'component_debug', (TType.STRING, 'UTF8', TType.STRUCT, [DebugOptions, None], False), None, ), # 9
(10, TType.STRING, 'principal', 'UTF8', None, ), # 10
(11, TType.STRING, 'topology_version', 'UTF8', None, ), # 11
)
all_structs.append(ClusterWorkerHeartbeat)
ClusterWorkerHeartbeat.thrift_spec = (
None, # 0
(1, TType.STRING, 'storm_id', 'UTF8', None, ), # 1
(2, TType.MAP, 'executor_stats', (TType.STRUCT, [ExecutorInfo, None], TType.STRUCT, [ExecutorStats, None], False), None, ), # 2
(3, TType.I32, 'time_secs', None, None, ), # 3
(4, TType.I32, 'uptime_secs', None, None, ), # 4
)
all_structs.append(ThriftSerializedObject)
ThriftSerializedObject.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
(2, TType.STRING, 'bits', 'BINARY', None, ), # 2
)
all_structs.append(LocalStateData)
LocalStateData.thrift_spec = (
None, # 0
(1, TType.MAP, 'serialized_parts', (TType.STRING, 'UTF8', TType.STRUCT, [ThriftSerializedObject, None], False), None, ), # 1
)
all_structs.append(LocalAssignment)
LocalAssignment.thrift_spec = (
None, # 0
(1, TType.STRING, 'topology_id', 'UTF8', None, ), # 1
(2, TType.LIST, 'executors', (TType.STRUCT, [ExecutorInfo, None], False), None, ), # 2
(3, TType.STRUCT, 'resources', [WorkerResources, None], None, ), # 3
(4, TType.DOUBLE, 'total_node_shared', None, None, ), # 4
(5, TType.STRING, 'owner', 'UTF8', None, ), # 5
)
all_structs.append(LSSupervisorId)
LSSupervisorId.thrift_spec = (
None, # 0
(1, TType.STRING, 'supervisor_id', 'UTF8', None, ), # 1
)
all_structs.append(LSApprovedWorkers)
LSApprovedWorkers.thrift_spec = (
None, # 0
(1, TType.MAP, 'approved_workers', (TType.STRING, 'UTF8', TType.I32, None, False), None, ), # 1
)
all_structs.append(LSSupervisorAssignments)
LSSupervisorAssignments.thrift_spec = (
None, # 0
(1, TType.MAP, 'assignments', (TType.I32, None, TType.STRUCT, [LocalAssignment, None], False), None, ), # 1
)
all_structs.append(LSWorkerHeartbeat)
LSWorkerHeartbeat.thrift_spec = (
None, # 0
(1, TType.I32, 'time_secs', None, None, ), # 1
(2, TType.STRING, 'topology_id', 'UTF8', None, ), # 2
(3, TType.LIST, 'executors', (TType.STRUCT, [ExecutorInfo, None], False), None, ), # 3
(4, TType.I32, 'port', None, None, ), # 4
)
all_structs.append(LSTopoHistory)
LSTopoHistory.thrift_spec = (
None, # 0
(1, TType.STRING, 'topology_id', 'UTF8', None, ), # 1
(2, TType.I64, 'time_stamp', None, None, ), # 2
(3, TType.LIST, 'users', (TType.STRING, 'UTF8', False), None, ), # 3
(4, TType.LIST, 'groups', (TType.STRING, 'UTF8', False), None, ), # 4
)
all_structs.append(LSTopoHistoryList)
LSTopoHistoryList.thrift_spec = (
None, # 0
(1, TType.LIST, 'topo_history', (TType.STRUCT, [LSTopoHistory, None], False), None, ), # 1
)
all_structs.append(ProfileRequest)
ProfileRequest.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'nodeInfo', [NodeInfo, None], None, ), # 1
(2, TType.I32, 'action', None, None, ), # 2
(3, TType.I64, 'time_stamp', None, None, ), # 3
)
all_structs.append(GetInfoOptions)
GetInfoOptions.thrift_spec = (
None, # 0
(1, TType.I32, 'num_err_choice', None, None, ), # 1
)
all_structs.append(LogLevel)
LogLevel.thrift_spec = (
None, # 0
(1, TType.I32, 'action', None, None, ), # 1
(2, TType.STRING, 'target_log_level', 'UTF8', None, ), # 2
(3, TType.I32, 'reset_log_level_timeout_secs', None, None, ), # 3
(4, TType.I64, 'reset_log_level_timeout_epoch', None, None, ), # 4
(5, TType.STRING, 'reset_log_level', 'UTF8', None, ), # 5
)
all_structs.append(LogConfig)
LogConfig.thrift_spec = (
None, # 0
None, # 1
(2, TType.MAP, 'named_logger_level', (TType.STRING, 'UTF8', TType.STRUCT, [LogLevel, None], False), None, ), # 2
)
all_structs.append(TopologyHistoryInfo)
TopologyHistoryInfo.thrift_spec = (
None, # 0
(1, TType.LIST, 'topo_ids', (TType.STRING, 'UTF8', False), None, ), # 1
)
all_structs.append(OwnerResourceSummary)
OwnerResourceSummary.thrift_spec = (
None, # 0
(1, TType.STRING, 'owner', 'UTF8', None, ), # 1
(2, TType.I32, 'total_topologies', None, None, ), # 2
(3, TType.I32, 'total_executors', None, None, ), # 3
(4, TType.I32, 'total_workers', None, None, ), # 4
(5, TType.DOUBLE, 'memory_usage', None, None, ), # 5
(6, TType.DOUBLE, 'cpu_usage', None, None, ), # 6
(7, TType.DOUBLE, 'memory_guarantee', None, None, ), # 7
(8, TType.DOUBLE, 'cpu_guarantee', None, None, ), # 8
(9, TType.DOUBLE, 'memory_guarantee_remaining', None, None, ), # 9
(10, TType.DOUBLE, 'cpu_guarantee_remaining', None, None, ), # 10
(11, TType.I32, 'isolated_node_guarantee', None, None, ), # 11
(12, TType.I32, 'total_tasks', None, None, ), # 12
(13, TType.DOUBLE, 'requested_on_heap_memory', None, None, ), # 13
(14, TType.DOUBLE, 'requested_off_heap_memory', None, None, ), # 14
(15, TType.DOUBLE, 'requested_total_memory', None, None, ), # 15
(16, TType.DOUBLE, 'requested_cpu', None, None, ), # 16
(17, TType.DOUBLE, 'assigned_on_heap_memory', None, None, ), # 17
(18, TType.DOUBLE, 'assigned_off_heap_memory', None, None, ), # 18
)
all_structs.append(SupervisorWorkerHeartbeat)
SupervisorWorkerHeartbeat.thrift_spec = (
None, # 0
(1, TType.STRING, 'storm_id', 'UTF8', None, ), # 1
(2, TType.LIST, 'executors', (TType.STRUCT, [ExecutorInfo, None], False), None, ), # 2
(3, TType.I32, 'time_secs', None, None, ), # 3
)
all_structs.append(SupervisorWorkerHeartbeats)
SupervisorWorkerHeartbeats.thrift_spec = (
None, # 0
(1, TType.STRING, 'supervisor_id', 'UTF8', None, ), # 1
(2, TType.LIST, 'worker_heartbeats', (TType.STRUCT, [SupervisorWorkerHeartbeat, None], False), None, ), # 2
)
all_structs.append(SupervisorAssignments)
SupervisorAssignments.thrift_spec = (
None, # 0
(1, TType.MAP, 'storm_assignment', (TType.STRING, 'UTF8', TType.STRUCT, [Assignment, None], False), {
}, ), # 1
)
all_structs.append(WorkerMetricPoint)
WorkerMetricPoint.thrift_spec = (
None, # 0
(1, TType.STRING, 'metricName', 'UTF8', None, ), # 1
(2, TType.I64, 'timestamp', None, None, ), # 2
(3, TType.DOUBLE, 'metricValue', None, None, ), # 3
(4, TType.STRING, 'componentId', 'UTF8', None, ), # 4
(5, TType.STRING, 'executorId', 'UTF8', None, ), # 5
(6, TType.STRING, 'streamId', 'UTF8', None, ), # 6
)
all_structs.append(WorkerMetricList)
WorkerMetricList.thrift_spec = (
None, # 0
(1, TType.LIST, 'metrics', (TType.STRUCT, [WorkerMetricPoint, None], False), None, ), # 1
)
all_structs.append(WorkerMetrics)
WorkerMetrics.thrift_spec = (
None, # 0
(1, TType.STRING, 'topologyId', 'UTF8', None, ), # 1
(2, TType.I32, 'port', None, None, ), # 2
(3, TType.STRING, 'hostname', 'UTF8', None, ), # 3
(4, TType.STRUCT, 'metricList', [WorkerMetricList, None], None, ), # 4
)
all_structs.append(DRPCRequest)
DRPCRequest.thrift_spec = (
None, # 0
(1, TType.STRING, 'func_args', 'UTF8', None, ), # 1
(2, TType.STRING, 'request_id', 'UTF8', None, ), # 2
)
all_structs.append(DRPCExecutionException)
DRPCExecutionException.thrift_spec = (
None, # 0
(1, TType.STRING, 'msg', 'UTF8', None, ), # 1
(2, TType.I32, 'type', None, None, ), # 2
)
all_structs.append(HBPulse)
HBPulse.thrift_spec = (
None, # 0
(1, TType.STRING, 'id', 'UTF8', None, ), # 1
(2, TType.STRING, 'details', 'BINARY', None, ), # 2
)
all_structs.append(HBRecords)
HBRecords.thrift_spec = (
None, # 0
(1, TType.LIST, 'pulses', (TType.STRUCT, [HBPulse, None], False), None, ), # 1
)
all_structs.append(HBNodes)
HBNodes.thrift_spec = (
None, # 0
(1, TType.LIST, 'pulseIds', (TType.STRING, 'UTF8', False), None, ), # 1
)
all_structs.append(HBMessageData)
HBMessageData.thrift_spec = (
None, # 0
(1, TType.STRING, 'path', 'UTF8', None, ), # 1
(2, TType.STRUCT, 'pulse', [HBPulse, None], None, ), # 2
(3, TType.BOOL, 'boolval', None, None, ), # 3
(4, TType.STRUCT, 'records', [HBRecords, None], None, ), # 4
(5, TType.STRUCT, 'nodes', [HBNodes, None], None, ), # 5
None, # 6
(7, TType.STRING, 'message_blob', 'BINARY', None, ), # 7
)
all_structs.append(HBMessage)
HBMessage.thrift_spec = (
None, # 0
(1, TType.I32, 'type', None, None, ), # 1
(2, TType.STRUCT, 'data', [HBMessageData, None], None, ), # 2
(3, TType.I32, 'message_id', None, -1, ), # 3
)
all_structs.append(HBAuthorizationException)
HBAuthorizationException.thrift_spec = (
None, # 0
(1, TType.STRING, 'msg', 'UTF8', None, ), # 1
)
all_structs.append(HBExecutionException)
HBExecutionException.thrift_spec = (
None, # 0
(1, TType.STRING, 'msg', 'UTF8', None, ), # 1
)
all_structs.append(WorkerTokenInfo)
WorkerTokenInfo.thrift_spec = (
None, # 0
(1, TType.STRING, 'userName', 'UTF8', None, ), # 1
(2, TType.STRING, 'topologyId', 'UTF8', None, ), # 2
(3, TType.I64, 'secretVersion', None, None, ), # 3
(4, TType.I64, 'expirationTimeMillis', None, None, ), # 4
)
all_structs.append(WorkerToken)
WorkerToken.thrift_spec = (
None, # 0
(1, TType.I32, 'serviceType', None, None, ), # 1
(2, TType.STRING, 'info', 'BINARY', None, ), # 2
(3, TType.STRING, 'signature', 'BINARY', None, ), # 3
)
all_structs.append(PrivateWorkerKey)
PrivateWorkerKey.thrift_spec = (
None, # 0
(1, TType.STRING, 'key', 'BINARY', None, ), # 1
(2, TType.STRING, 'userName', 'UTF8', None, ), # 2
(3, TType.I64, 'expirationTimeMillis', None, None, ), # 3
)
fix_spec(all_structs)
del all_structs
| 36.168236
| 808
| 0.557572
| 51,451
| 472,321
| 4.912499
| 0.033974
| 0.015173
| 0.027311
| 0.03454
| 0.80678
| 0.774202
| 0.715256
| 0.677385
| 0.654006
| 0.639981
| 0
| 0.044067
| 0.335151
| 472,321
| 13,058
| 809
| 36.171006
| 0.760822
| 0.035791
| 0
| 0.724863
| 0
| 0
| 0.044796
| 0.004769
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059142
| false
| 0
| 0.000443
| 0.020128
| 0.125732
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1670cf6142eed8f46759590cf568a7d3ce322621
| 225
|
py
|
Python
|
views/home.py
|
yekudename/reader
|
93955051d1ac959f7713e66f67bf8b57d8b13df2
|
[
"Apache-2.0"
] | null | null | null |
views/home.py
|
yekudename/reader
|
93955051d1ac959f7713e66f67bf8b57d8b13df2
|
[
"Apache-2.0"
] | null | null | null |
views/home.py
|
yekudename/reader
|
93955051d1ac959f7713e66f67bf8b57d8b13df2
|
[
"Apache-2.0"
] | null | null | null |
import os
from flask import Blueprint, render_template
home_bp = Blueprint('home', __name__, template_folder='../template', static_folder='../static')
@home_bp.route('/')
def home():
return render_template('index.html')
| 28.125
| 95
| 0.737778
| 29
| 225
| 5.37931
| 0.586207
| 0.179487
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102222
| 225
| 8
| 96
| 28.125
| 0.772277
| 0
| 0
| 0
| 0
| 0
| 0.154867
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
1690435071ec041d2beaca0c2c32b6e29cf4d478
| 90
|
py
|
Python
|
alpha_vantage/models/__init__.py
|
wstolk/alpha-vantage
|
d1d2d5513e5e1c204342da40019a633a5f75a3aa
|
[
"Apache-2.0"
] | null | null | null |
alpha_vantage/models/__init__.py
|
wstolk/alpha-vantage
|
d1d2d5513e5e1c204342da40019a633a5f75a3aa
|
[
"Apache-2.0"
] | null | null | null |
alpha_vantage/models/__init__.py
|
wstolk/alpha-vantage
|
d1d2d5513e5e1c204342da40019a633a5f75a3aa
|
[
"Apache-2.0"
] | null | null | null |
from alpha_vantage.models.timeseries import MetadataModel, TimeSeriesModel, TimeSerieModel
| 90
| 90
| 0.9
| 9
| 90
| 8.888889
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 90
| 1
| 90
| 90
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
169f7032c823541606a2320303414cf449a77a2d
| 99
|
py
|
Python
|
resolwe/test_helpers/__init__.py
|
plojyon/resolwe
|
1bee6f0860fdd087534adf1680e9350d79ab97cf
|
[
"Apache-2.0"
] | 27
|
2015-12-07T18:29:12.000Z
|
2022-03-16T08:01:47.000Z
|
resolwe/test_helpers/__init__.py
|
plojyon/resolwe
|
1bee6f0860fdd087534adf1680e9350d79ab97cf
|
[
"Apache-2.0"
] | 681
|
2015-12-01T11:52:24.000Z
|
2022-03-21T07:43:37.000Z
|
resolwe/test_helpers/__init__.py
|
plojyon/resolwe
|
1bee6f0860fdd087534adf1680e9350d79ab97cf
|
[
"Apache-2.0"
] | 28
|
2015-12-01T08:32:57.000Z
|
2021-12-14T00:04:16.000Z
|
""".. Ignore pydocstyle D400.
====================
Resolwe Test Helpers
====================
"""
| 12.375
| 29
| 0.383838
| 6
| 99
| 6.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034483
| 0.121212
| 99
| 7
| 30
| 14.142857
| 0.402299
| 0.909091
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
16a798a32059cf0f0313c892aa59b77396c22011
| 59
|
py
|
Python
|
python_translator/__init__.py
|
bentettmar/python-translator
|
7f4144e7ad80d54a8ad19ddee1d0745a98d3111a
|
[
"MIT"
] | 2
|
2022-03-12T08:04:25.000Z
|
2022-03-13T18:00:28.000Z
|
python_translator/__init__.py
|
bentettmar/python-translator
|
7f4144e7ad80d54a8ad19ddee1d0745a98d3111a
|
[
"MIT"
] | 2
|
2022-03-11T08:39:18.000Z
|
2022-03-13T17:36:34.000Z
|
python_translator/__init__.py
|
bentettmar/python-translator
|
7f4144e7ad80d54a8ad19ddee1d0745a98d3111a
|
[
"MIT"
] | 1
|
2022-03-11T00:30:29.000Z
|
2022-03-11T00:30:29.000Z
|
from .translator import Translator
from . import exceptions
| 29.5
| 34
| 0.847458
| 7
| 59
| 7.142857
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118644
| 59
| 2
| 35
| 29.5
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
16a886418786fef2e8877affb148e9e636d3f0fe
| 272
|
py
|
Python
|
common/facade/api/__init__.py
|
Softeq/PyCats
|
c71d54ca7fa958c22ca8c78ba9889c6a32b827be
|
[
"Apache-2.0"
] | 7
|
2020-06-12T15:16:10.000Z
|
2020-06-20T18:42:07.000Z
|
common/facade/api/__init__.py
|
Softeq/PyCats
|
c71d54ca7fa958c22ca8c78ba9889c6a32b827be
|
[
"Apache-2.0"
] | 4
|
2020-06-15T20:08:32.000Z
|
2020-06-29T16:51:57.000Z
|
common/facade/api/__init__.py
|
Softeq/SCAF
|
c71d54ca7fa958c22ca8c78ba9889c6a32b827be
|
[
"Apache-2.0"
] | 3
|
2020-07-27T10:45:36.000Z
|
2021-01-13T12:10:46.000Z
|
# REST QA API imports
from common._rest_qa_api.base_endpoint import BaseEndpoint, endpoint_factory, BaseRequestModel, BaseResponseModel # noqa
from common._rest_qa_api import rest_checkers # noqa
from common._rest_qa_api.rest_utils import SKIP, pycats_dataclass # noqa
| 54.4
| 121
| 0.841912
| 39
| 272
| 5.512821
| 0.487179
| 0.111628
| 0.167442
| 0.223256
| 0.302326
| 0.213953
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110294
| 272
| 4
| 122
| 68
| 0.88843
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bc59d178d002dbe7c6c646f234beb3a8bb8cd27c
| 275
|
py
|
Python
|
pyhcl/dsl/dslchecker.py
|
raybdzhou/PyChip-py-hcl
|
08edc6ad4d2978eb417482f6f92678f8f9a1e3c7
|
[
"MIT"
] | 1
|
2021-12-10T14:02:54.000Z
|
2021-12-10T14:02:54.000Z
|
pyhcl/dsl/dslchecker.py
|
raybdzhou/PyChip-py-hcl
|
08edc6ad4d2978eb417482f6f92678f8f9a1e3c7
|
[
"MIT"
] | null | null | null |
pyhcl/dsl/dslchecker.py
|
raybdzhou/PyChip-py-hcl
|
08edc6ad4d2978eb417482f6f92678f8f9a1e3c7
|
[
"MIT"
] | 1
|
2022-03-04T03:36:01.000Z
|
2022-03-04T03:36:01.000Z
|
from pyhcl.core._clock_manager import Clock_manager
class DslChecker:
@classmethod
def do_check(cls, vic):
DslChecker.clock_check(vic)
@classmethod
def clock_check(cls, vic):
pass
@classmethod
def loop_check(cls, vic):
pass
| 18.333333
| 51
| 0.661818
| 34
| 275
| 5.147059
| 0.470588
| 0.24
| 0.188571
| 0.171429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.261818
| 275
| 15
| 52
| 18.333333
| 0.862069
| 0
| 0
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0.181818
| 0.090909
| 0
| 0.454545
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
bc96e66ba331b961d6ebc718883c1c5f3e3e8635
| 229
|
py
|
Python
|
csrank/dataset_reader/labelranking/__init__.py
|
kiudee/cs-ranking
|
47cf648fa286c37b9214bbad1926004d4d7d9796
|
[
"Apache-2.0"
] | 65
|
2018-02-12T13:18:13.000Z
|
2021-12-18T12:01:51.000Z
|
csrank/dataset_reader/labelranking/__init__.py
|
kiudee/cs-ranking
|
47cf648fa286c37b9214bbad1926004d4d7d9796
|
[
"Apache-2.0"
] | 189
|
2018-02-13T10:11:55.000Z
|
2022-03-12T16:36:23.000Z
|
csrank/dataset_reader/labelranking/__init__.py
|
kiudee/cs-ranking
|
47cf648fa286c37b9214bbad1926004d4d7d9796
|
[
"Apache-2.0"
] | 19
|
2018-03-08T15:39:31.000Z
|
2020-11-18T12:46:36.000Z
|
from .intelligent_system_group_dataset_reader import IntelligentSystemGroupDatasetReader
from .survey_dataset_reader import SurveyDatasetReader
__all__ = [
"IntelligentSystemGroupDatasetReader",
"SurveyDatasetReader",
]
| 28.625
| 88
| 0.851528
| 17
| 229
| 10.882353
| 0.647059
| 0.140541
| 0.205405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100437
| 229
| 7
| 89
| 32.714286
| 0.898058
| 0
| 0
| 0
| 0
| 0
| 0.235808
| 0.152838
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
bcb2d5c62997ad5c3bde04bb69e117d3261be072
| 74
|
py
|
Python
|
EasySelenium/webdriver/__init__.py
|
HHongSeungWoo/EasySelenium
|
9b1c23d36f115b4b08ef8046c6c862f4bcafac17
|
[
"MIT"
] | null | null | null |
EasySelenium/webdriver/__init__.py
|
HHongSeungWoo/EasySelenium
|
9b1c23d36f115b4b08ef8046c6c862f4bcafac17
|
[
"MIT"
] | null | null | null |
EasySelenium/webdriver/__init__.py
|
HHongSeungWoo/EasySelenium
|
9b1c23d36f115b4b08ef8046c6c862f4bcafac17
|
[
"MIT"
] | null | null | null |
from selenium.webdriver import *
from .driver import Ie, Chrome, Firefox
| 18.5
| 39
| 0.783784
| 10
| 74
| 5.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148649
| 74
| 3
| 40
| 24.666667
| 0.920635
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bcd29b8658b90a79ab5b62042bd2b76d9b31076d
| 24
|
py
|
Python
|
venv/Lib/site-packages/sindy.py
|
ayandeephazra/Chemical_Origins_Of_Life
|
5858b51063c53b2db00529fe79a67d321193171e
|
[
"MIT"
] | 1
|
2021-08-01T18:22:51.000Z
|
2021-08-01T18:22:51.000Z
|
venv/Lib/site-packages/sindy.py
|
ayandeephazra/Chemical_Origins_Of_Life
|
5858b51063c53b2db00529fe79a67d321193171e
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/sindy.py
|
ayandeephazra/Chemical_Origins_Of_Life
|
5858b51063c53b2db00529fe79a67d321193171e
|
[
"MIT"
] | null | null | null |
from sparsereg import *
| 12
| 23
| 0.791667
| 3
| 24
| 6.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 24
| 1
| 24
| 24
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
bcef4709778c7a5c345f9df9ccc80dc13888e4b5
| 119
|
py
|
Python
|
src/flask/src/server.py
|
silconari/CORE-project-penguins-
|
d371311ef03b990d0787e52d981a44a792245ee2
|
[
"Unlicense"
] | null | null | null |
src/flask/src/server.py
|
silconari/CORE-project-penguins-
|
d371311ef03b990d0787e52d981a44a792245ee2
|
[
"Unlicense"
] | 1
|
2021-08-19T14:03:34.000Z
|
2021-08-21T16:20:16.000Z
|
src/flask/src/server.py
|
silconari/CORE-project-penguins-
|
d371311ef03b990d0787e52d981a44a792245ee2
|
[
"Unlicense"
] | null | null | null |
from config import PORT
import controllers.root_controllers
from app import app
app.run("0.0.0.0", PORT, debug=True)
| 17
| 36
| 0.773109
| 21
| 119
| 4.333333
| 0.52381
| 0.065934
| 0.065934
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0.12605
| 119
| 6
| 37
| 19.833333
| 0.836538
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bcfdec3885ceaaa1c96ffc1b3998f083c5009c02
| 368
|
py
|
Python
|
medusa/lang/object_oriented/inherit.py
|
deadwind4/medusa
|
f2d2041349e70a7321f0e1f1c30137ac8769edd5
|
[
"MIT"
] | null | null | null |
medusa/lang/object_oriented/inherit.py
|
deadwind4/medusa
|
f2d2041349e70a7321f0e1f1c30137ac8769edd5
|
[
"MIT"
] | null | null | null |
medusa/lang/object_oriented/inherit.py
|
deadwind4/medusa
|
f2d2041349e70a7321f0e1f1c30137ac8769edd5
|
[
"MIT"
] | null | null | null |
class Base:
def __init__(self, name):
self.name = name
class Foo(Base):
def __init__(self, name, time):
Base.__init__(self, name)
self.time = time
class BaseA:
def __init__(self):
print("BaseA")
class FooA(BaseA):
def __init__(self):
BaseA.__init__(self)
super().__init__()
pass
foo = FooA()
| 14.72
| 35
| 0.576087
| 45
| 368
| 4.088889
| 0.288889
| 0.26087
| 0.23913
| 0.163043
| 0.206522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.298913
| 368
| 24
| 36
| 15.333333
| 0.713178
| 0
| 0
| 0.125
| 0
| 0
| 0.013587
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.0625
| 0
| 0
| 0.5
| 0.0625
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
4c1f918cc3f21841f18878865e6dba0ea7334b25
| 2,212
|
py
|
Python
|
backend/sb_db/__init__.py
|
arontaupe/KommunikationsKrake
|
145bf9a2b4b3d70635987d18a6a0d4d8438bfb96
|
[
"MIT"
] | null | null | null |
backend/sb_db/__init__.py
|
arontaupe/KommunikationsKrake
|
145bf9a2b4b3d70635987d18a6a0d4d8438bfb96
|
[
"MIT"
] | null | null | null |
backend/sb_db/__init__.py
|
arontaupe/KommunikationsKrake
|
145bf9a2b4b3d70635987d18a6a0d4d8438bfb96
|
[
"MIT"
] | null | null | null |
# coding: utf-8
# flake8: noqa
"""
Sommerblut-Database
Event and festival info # noqa: E501
OpenAPI spec version: 1.5.0
Contact: support@xtain.net
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
# import apis into sdk package
from sb_db.api.accessibilities_api import AccessibilitiesApi
from sb_db.api.categories_api import CategoriesApi
from sb_db.api.dates_of_events_api import DatesOfEventsApi
from sb_db.api.events_api import EventsApi
from sb_db.api.festivals_api import FestivalsApi
from sb_db.api.locations_api import LocationsApi
from sb_db.api.running_events_api import RunningEventsApi
from sb_db.api.running_streams_api import RunningStreamsApi
from sb_db.api.stages_api import StagesApi
from sb_db.api.tags_api import TagsApi
# import ApiClient
from sb_db.api_client import ApiClient
from sb_db.configuration import Configuration
# import models into sdk package
from sb_db.models.accessibilities import Accessibilities
from sb_db.models.accessibility import Accessibility
from sb_db.models.categories import Categories
from sb_db.models.category import Category
from sb_db.models.dates import Dates
from sb_db.models.dates_inner import DatesInner
from sb_db.models.event import Event
from sb_db.models.event_audience_format import EventAudienceFormat
from sb_db.models.event_date import EventDate
from sb_db.models.event_dates import EventDates
from sb_db.models.event_ensemble_function import EventEnsembleFunction
from sb_db.models.event_ensembles import EventEnsembles
from sb_db.models.event_next_date import EventNextDate
from sb_db.models.event_photo_delivery import EventPhotoDelivery
from sb_db.models.event_price_handling import EventPriceHandling
from sb_db.models.events import Events
from sb_db.models.festival import Festival
from sb_db.models.festivals import Festivals
from sb_db.models.location import Location
from sb_db.models.location_contracts import LocationContracts
from sb_db.models.location_group import LocationGroup
from sb_db.models.location_groups import LocationGroups
from sb_db.models.locations import Locations
from sb_db.models.tag import Tag
from sb_db.models.tags import Tags
| 38.807018
| 70
| 0.853526
| 339
| 2,212
| 5.351032
| 0.286136
| 0.122381
| 0.163175
| 0.192944
| 0.233186
| 0.024256
| 0
| 0
| 0
| 0
| 0
| 0.004014
| 0.099005
| 2,212
| 56
| 71
| 39.5
| 0.906172
| 0.128391
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4c55a8f5d61ae301d8af59d841b0795bdc1999d3
| 7,944
|
py
|
Python
|
net/data/verify_certificate_chain_unittest/many-names/generate-chains.py
|
zealoussnow/chromium
|
fd8a8914ca0183f0add65ae55f04e287543c7d4a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 14,668
|
2015-01-01T01:57:10.000Z
|
2022-03-31T23:33:32.000Z
|
net/data/verify_certificate_chain_unittest/many-names/generate-chains.py
|
zealoussnow/chromium
|
fd8a8914ca0183f0add65ae55f04e287543c7d4a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 113
|
2015-05-04T09:58:14.000Z
|
2022-01-31T19:35:03.000Z
|
net/data/verify_certificate_chain_unittest/many-names/generate-chains.py
|
zealoussnow/chromium
|
fd8a8914ca0183f0add65ae55f04e287543c7d4a
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 5,941
|
2015-01-02T11:32:21.000Z
|
2022-03-31T16:35:46.000Z
|
#!/usr/bin/env python
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
sys.path += ['../..']
import gencerts
def add_excluded_name_constraints(cert, num_dns, num_ip, num_dirnames, num_uri):
cert.get_extensions().set_property('nameConstraints', '@nameConstraints_info')
constraints = cert.config.get_section('nameConstraints_info')
for i in range(num_dns):
constraints.set_property('excluded;DNS.%i' % (i + 1), 'x%i.test' % i)
for i in range(num_ip):
b,c = divmod(i, 256)
a,b = divmod(b, 256)
constraints.set_property('excluded;IP.%i' % (i + 1),
'11.%i.%i.%i/255.255.255.255' % (a, b, c))
for i in range(num_dirnames):
section_name = 'nameConstraints_dirname_x%i' % (i + 1)
dirname = cert.config.get_section(section_name)
dirname.set_property('commonName', '"x%i' % i)
constraints.set_property('excluded;dirName.%i' % (i + 1), section_name)
for i in range(num_uri):
constraints.set_property('excluded;URI.%i' % (i + 1), 'http://xest/%i' % i)
def add_permitted_name_constraints(
    cert, num_dns, num_ip, num_dirnames, num_uri):
  """Attaches a permitted-subtrees nameConstraints extension to |cert|.

  Fills the 'nameConstraints_info' config section with |num_dns| DNS
  names, |num_ip| single-address IP ranges, |num_dirnames| directory
  names and |num_uri| URIs.
  """
  cert.get_extensions().set_property('nameConstraints', '@nameConstraints_info')
  section = cert.config.get_section('nameConstraints_info')

  for n in range(1, num_dns + 1):
    section.set_property('permitted;DNS.%i' % n, 't%i.test' % (n - 1))

  for n in range(num_ip):
    # Spread the counter across the low three octets of a 10.x.y.z address.
    upper, c = divmod(n, 256)
    a, b = divmod(upper, 256)
    section.set_property('permitted;IP.%i' % (n + 1),
                         '10.%i.%i.%i/255.255.255.255' % (a, b, c))

  for n in range(num_dirnames):
    # Each directory name lives in its own config section.
    dirname_section = 'nameConstraints_dirname_p%i' % (n + 1)
    cert.config.get_section(dirname_section).set_property(
        'commonName', '"t%i' % n)
    section.set_property('permitted;dirName.%i' % (n + 1), dirname_section)

  for n in range(num_uri):
    section.set_property('permitted;URI.%i' % (n + 1),
                         'http://test/%i' % n)
def add_sans(cert, num_dns, num_ip, num_dirnames, num_uri):
  """Attaches a subjectAltName extension to |cert|.

  Fills the 'san_info' config section with |num_dns| DNS names, |num_ip|
  IP addresses, |num_dirnames| directory names and |num_uri| URIs.
  """
  cert.get_extensions().set_property('subjectAltName', '@san_info')
  section = cert.config.get_section('san_info')

  for n in range(1, num_dns + 1):
    section.set_property('DNS.%i' % n, 't%i.test' % (n - 1))

  for n in range(num_ip):
    # Spread the counter across the low three octets of a 10.x.y.z address.
    upper, c = divmod(n, 256)
    a, b = divmod(upper, 256)
    section.set_property('IP.%i' % (n + 1), '10.%i.%i.%i' % (a, b, c))

  for n in range(num_dirnames):
    # Each directory name lives in its own config section.
    dirname_section = 'san_dirname%i' % (n + 1)
    cert.config.get_section(dirname_section).set_property(
        'commonName', '"t%i' % n)
    section.set_property('dirName.%i' % (n + 1), dirname_section)

  for n in range(num_uri):
    section.set_property('URI.%i' % (n + 1), 'http://test/%i' % n)
# Self-signed root certificate.
root = gencerts.create_self_signed_root_certificate('Root')

# Use the same keys for all the chains. Fewer key files to check in, and also
# gives stability against re-ordering of the calls to |make_chain|.
intermediate_key = gencerts.get_or_generate_rsa_key(
    2048, gencerts.create_key_path('Intermediate'))
# Key for the end-entity ("target") certificate of every chain.
target_key = gencerts.get_or_generate_rsa_key(
    2048, gencerts.create_key_path('t0'))
def make_chain(name, doc, excluded, permitted, sans):
  """Writes a root -> intermediate -> target chain to |name|.pem.

  The intermediate carries the given excluded and permitted name
  constraints (keyword dicts forwarded to the add_*_name_constraints
  helpers); the target carries the given subjectAltNames. |doc| is the
  human-readable description embedded in the output file.
  """
  # Intermediate certificate carrying the name constraints.
  intermediate = gencerts.create_intermediate_certificate('Intermediate', root)
  intermediate.set_key(intermediate_key)
  add_excluded_name_constraints(intermediate, **excluded)
  add_permitted_name_constraints(intermediate, **permitted)

  # Target (end-entity) certificate carrying the subjectAltNames.
  target = gencerts.create_end_entity_certificate('t0', intermediate)
  target.set_key(target_key)
  add_sans(target, **sans)

  gencerts.write_chain(doc, [target, intermediate, root], '%s.pem' % name)
# Generate the test chains. The "ok-*" chains stay at or below the
# name-count limits; the "toomany-*" chains exceed them.
make_chain(
    'ok-all-types',
    "A chain containing a large number of name constraints and names,\n"
    "but below the limit.",
    excluded=dict(num_dns=418, num_ip=418, num_dirnames=418, num_uri=1025),
    permitted=dict(num_dns=418, num_ip=418, num_dirnames=418, num_uri=1025),
    sans=dict(num_dns=418, num_ip=418, num_dirnames=417, num_uri=1025))

make_chain(
    'toomany-all-types',
    "A chain containing a large number of different types of name\n"
    "constraints and names, above the limit.",
    excluded=dict(num_dns=419, num_ip=419, num_dirnames=419, num_uri=0),
    permitted=dict(num_dns=419, num_ip=419, num_dirnames=419, num_uri=0),
    sans=dict(num_dns=419, num_ip=419, num_dirnames=418, num_uri=0))

make_chain(
    'toomany-dns-excluded',
    "A chain containing a large number of excluded DNS name\n"
    "constraints and DNS names, above the limit.",
    excluded=dict(num_dns=1025, num_ip=0, num_dirnames=0, num_uri=0),
    permitted=dict(num_dns=0, num_ip=0, num_dirnames=0, num_uri=0),
    sans=dict(num_dns=1024, num_ip=0, num_dirnames=0, num_uri=0))

make_chain(
    'toomany-ips-excluded',
    "A chain containing a large number of excluded IP name\n"
    "constraints and IP names, above the limit.",
    excluded=dict(num_dns=0, num_ip=1025, num_dirnames=0, num_uri=0),
    permitted=dict(num_dns=0, num_ip=0, num_dirnames=0, num_uri=0),
    sans=dict(num_dns=0, num_ip=1024, num_dirnames=0, num_uri=0))

make_chain(
    'toomany-dirnames-excluded',
    "A chain containing a large number of excluded directory name\n"
    "constraints and directory names, above the limit.",
    excluded=dict(num_dns=0, num_ip=0, num_dirnames=1025, num_uri=0),
    permitted=dict(num_dns=0, num_ip=0, num_dirnames=0, num_uri=0),
    sans=dict(num_dns=0, num_ip=0, num_dirnames=1024, num_uri=0))

make_chain(
    'toomany-dns-permitted',
    "A chain containing a large number of permitted DNS name\n"
    "constraints and DNS names, above the limit.",
    excluded=dict(num_dns=0, num_ip=0, num_dirnames=0, num_uri=0),
    permitted=dict(num_dns=1025, num_ip=0, num_dirnames=0, num_uri=0),
    sans=dict(num_dns=1024, num_ip=0, num_dirnames=0, num_uri=0))

make_chain(
    'toomany-ips-permitted',
    "A chain containing a large number of permitted IP name\n"
    "constraints and IP names, above the limit.",
    excluded=dict(num_dns=0, num_ip=0, num_dirnames=0, num_uri=0),
    permitted=dict(num_dns=0, num_ip=1025, num_dirnames=0, num_uri=0),
    sans=dict(num_dns=0, num_ip=1024, num_dirnames=0, num_uri=0))

make_chain(
    'toomany-dirnames-permitted',
    "A chain containing a large number of permitted directory name\n"
    "constraints and directory names, above the limit.",
    excluded=dict(num_dns=0, num_ip=0, num_dirnames=0, num_uri=0),
    permitted=dict(num_dns=0, num_ip=0, num_dirnames=1025, num_uri=0),
    sans=dict(num_dns=0, num_ip=0, num_dirnames=1024, num_uri=0))

make_chain(
    'ok-different-types-dns',
    "A chain containing a large number of name constraints and names,\n"
    "but of different types, thus not triggering the limit.",
    excluded=dict(num_dns=0, num_ip=1025, num_dirnames=1025, num_uri=1025),
    permitted=dict(num_dns=0, num_ip=1025, num_dirnames=1025, num_uri=1025),
    sans=dict(num_dns=1025, num_ip=0, num_dirnames=0, num_uri=0))

make_chain(
    'ok-different-types-ips',
    "A chain containing a large number of name constraints and names,\n"
    "but of different types, thus not triggering the limit.",
    excluded=dict(num_dns=1025, num_ip=0, num_dirnames=1025, num_uri=1025),
    permitted=dict(num_dns=1025, num_ip=0, num_dirnames=1025, num_uri=1025),
    sans=dict(num_dns=0, num_ip=1025, num_dirnames=0, num_uri=0))

make_chain(
    'ok-different-types-dirnames',
    "A chain containing a large number of name constraints and names,\n"
    "but of different types, thus not triggering the limit.",
    excluded=dict(num_dns=1025, num_ip=1025, num_dirnames=0, num_uri=1025),
    permitted=dict(num_dns=1025, num_ip=1025, num_dirnames=0, num_uri=1025),
    sans=dict(num_dns=0, num_ip=0, num_dirnames=1025, num_uri=0))
| 45.136364
| 80
| 0.702669
| 1,298
| 7,944
| 4.101695
| 0.105547
| 0.040571
| 0.061983
| 0.030428
| 0.794703
| 0.772727
| 0.769722
| 0.754132
| 0.73704
| 0.663411
| 0
| 0.052836
| 0.158988
| 7,944
| 175
| 81
| 45.394286
| 0.74405
| 0.049597
| 0
| 0.428571
| 0
| 0
| 0.262565
| 0.041639
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027211
| false
| 0
| 0.013605
| 0
| 0.040816
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4c6fe167f4b1c8772f52c1621baa1361224eac90
| 11,849
|
py
|
Python
|
census_data_downloader/tables/poverty.py
|
ian-r-rose/census-data-downloader
|
f8ac9d773e6d3f52be87bf916a2e32249391f966
|
[
"MIT"
] | null | null | null |
census_data_downloader/tables/poverty.py
|
ian-r-rose/census-data-downloader
|
f8ac9d773e6d3f52be87bf916a2e32249391f966
|
[
"MIT"
] | null | null | null |
census_data_downloader/tables/poverty.py
|
ian-r-rose/census-data-downloader
|
f8ac9d773e6d3f52be87bf916a2e32249391f966
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
# -*- coding: utf-8 -*
import collections
from census_data_downloader.core.tables import BaseTableConfig
from census_data_downloader.core.decorators import register
@register
class PovertyDownloader(BaseTableConfig):
    """
    A simplified version of the poverty table that only returns grand totals.
    """
    PROCESSED_TABLE_NAME = "poverty"
    UNIVERSE = "population for whom poverty status is determined"
    RAW_TABLE_NAME = 'B17001'
    # Raw census field id -> processed column name.
    RAW_FIELD_CROSSWALK = collections.OrderedDict([
        ('001', 'universe'),
        ('002', 'income_past12months_below_poverty_level'),
        ('031', 'income_past12months_at_or_above_poverty_level'),
    ])
@register
class PovertyBySexDownloader(BaseTableConfig):
    """
    A simplified version of the poverty table that only returns totals by sex.
    """
    PROCESSED_TABLE_NAME = "povertystatusbygender"
    UNIVERSE = "population for whom poverty status is determined"
    RAW_TABLE_NAME = 'B17001'
    # Raw census field id -> processed column name.
    RAW_FIELD_CROSSWALK = collections.OrderedDict([
        ('001', "total"),
        ('002', "total_below_poverty_level"),
        ('003', "male_below_poverty_level"),
        ('017', "female_below_poverty_level"),
        ('031', "total_at_or_above_poverty_level"),
        ('032', "male_at_or_above_poverty_level"),
        ('046', "female_at_or_above_poverty_level"),
    ])
@register
class PovertyAgeDownloader(BaseTableConfig):
    """
    The full table.

    Adds combined-gender totals per age group and several custom age
    roll-ups (e.g. 0-17, 18-64) on top of the raw census fields.
    """
    PROCESSED_TABLE_NAME = "povertystatusbyage"
    UNIVERSE = "population for whom poverty status is determined"
    RAW_TABLE_NAME = 'B17001'
    RAW_FIELD_CROSSWALK = collections.OrderedDict({
        '001': "total",
        '002': "total_below_poverty_level",
        '003': "male_below_poverty_level",
        '004': "male_under_5_below_poverty_level",
        '005': "male_5_below_poverty_level",
        '006': "male_6_to_11_below_poverty_level",
        '007': "male_12_to_14_below_poverty_level",
        '008': "male_15_below_poverty_level",
        '009': "male_16_to_17_below_poverty_level",
        '010': "male_18_to_24_below_poverty_level",
        '011': "male_25_to_34_below_poverty_level",
        '012': "male_35_to_44_below_poverty_level",
        '013': "male_45_to_54_below_poverty_level",
        '014': "male_55_to_64_below_poverty_level",
        '015': "male_65_to_74_below_poverty_level",
        '016': "male_75_and_over_below_poverty_level",
        '017': "female_below_poverty_level",
        '018': "female_under_5_below_poverty_level",
        '019': "female_5_below_poverty_level",
        '020': "female_6_to_11_below_poverty_level",
        '021': "female_12_to_14_below_poverty_level",
        '022': "female_15_below_poverty_level",
        '023': "female_16_to_17_below_poverty_level",
        '024': "female_18_to_24_below_poverty_level",
        '025': "female_25_to_34_below_poverty_level",
        '026': "female_35_to_44_below_poverty_level",
        '027': "female_45_to_54_below_poverty_level",
        '028': "female_55_to_64_below_poverty_level",
        '029': "female_65_to_74_below_poverty_level",
        '030': "female_75_and_over_below_poverty_level",
        '031': "total_at_or_above_poverty_level",
        '032': "male_at_or_above_poverty_level",
        '033': "male_under_5_at_or_above_poverty_level",
        '034': "male_5_at_or_above_poverty_level",
        '035': "male_6_to_11_at_or_above_poverty_level",
        '036': "male_12_to_14_at_or_above_poverty_level",
        '037': "male_15_at_or_above_poverty_level",
        '038': "male_16_to_17_at_or_above_poverty_level",
        '039': "male_18_to_24_at_or_above_poverty_level",
        '040': "male_25_to_34_at_or_above_poverty_level",
        '041': "male_35_to_44_at_or_above_poverty_level",
        '042': "male_45_to_54_at_or_above_poverty_level",
        '043': "male_55_to_64_at_or_above_poverty_level",
        '044': "male_65_to_74_at_or_above_poverty_level",
        '045': "male_75_and_over_at_or_above_poverty_level",
        '046': "female_at_or_above_poverty_level",
        '047': "female_under_5_at_or_above_poverty_level",
        '048': "female_5_at_or_above_poverty_level",
        '049': "female_6_to_11_at_or_above_poverty_level",
        '050': "female_12_to_14_at_or_above_poverty_level",
        '051': "female_15_at_or_above_poverty_level",
        '052': "female_16_to_17_at_or_above_poverty_level",
        '053': "female_18_to_24_at_or_above_poverty_level",
        '054': "female_25_to_34_at_or_above_poverty_level",
        '055': "female_35_to_44_at_or_above_poverty_level",
        '056': "female_45_to_54_at_or_above_poverty_level",
        '057': "female_55_to_64_at_or_above_poverty_level",
        '058': "female_65_to_74_at_or_above_poverty_level",
        '059': "female_75_and_over_at_or_above_poverty_level"
    })

    def process(self, *args, **kwargs):
        """Post-process the raw table.

        Adds, for every age group, a combined-gender ``total_*`` column,
        then total/male/female columns for each custom age roll-up.
        Returns the augmented DataFrame.
        """
        df = super().process(*args, **kwargs)

        # Calculate totals for both genders together
        groups = [
            'under_5_below_poverty_level',
            '5_below_poverty_level',
            '6_to_11_below_poverty_level',
            '12_to_14_below_poverty_level',
            '15_below_poverty_level',
            '16_to_17_below_poverty_level',
            '18_to_24_below_poverty_level',
            '25_to_34_below_poverty_level',
            '35_to_44_below_poverty_level',
            '45_to_54_below_poverty_level',
            '55_to_64_below_poverty_level',
            '65_to_74_below_poverty_level',
            '75_and_over_below_poverty_level',
            'under_5_at_or_above_poverty_level',
            '5_at_or_above_poverty_level',
            '6_to_11_at_or_above_poverty_level',
            '12_to_14_at_or_above_poverty_level',
            '15_at_or_above_poverty_level',
            '16_to_17_at_or_above_poverty_level',
            '18_to_24_at_or_above_poverty_level',
            '25_to_34_at_or_above_poverty_level',
            '35_to_44_at_or_above_poverty_level',
            '45_to_54_at_or_above_poverty_level',
            '55_to_64_at_or_above_poverty_level',
            '65_to_74_at_or_above_poverty_level',
            '75_and_over_at_or_above_poverty_level'
        ]
        for g in groups:
            df[f'total_{g}'] = df[f'male_{g}'] + df[f'female_{g}']

        # Calculate custom group sets. Each maps a roll-up name to the
        # constituent per-age-group column suffixes.
        groupsets = collections.OrderedDict({
            "0_to_17_total": [
                'under_5_below_poverty_level',
                '5_below_poverty_level',
                '6_to_11_below_poverty_level',
                '12_to_14_below_poverty_level',
                '15_below_poverty_level',
                '16_to_17_below_poverty_level',
                'under_5_at_or_above_poverty_level',
                '5_at_or_above_poverty_level',
                '6_to_11_at_or_above_poverty_level',
                '12_to_14_at_or_above_poverty_level',
                '15_at_or_above_poverty_level',
                '16_to_17_at_or_above_poverty_level',
            ],
            "0_to_17_below_poverty_level": [
                'under_5_below_poverty_level',
                '5_below_poverty_level',
                '6_to_11_below_poverty_level',
                '12_to_14_below_poverty_level',
                '15_below_poverty_level',
                '16_to_17_below_poverty_level',
            ],
            "18_to_34_total": [
                '18_to_24_below_poverty_level',
                '25_to_34_below_poverty_level',
                '18_to_24_at_or_above_poverty_level',
                '25_to_34_at_or_above_poverty_level',
            ],
            "18_to_34_below_poverty_level": [
                '18_to_24_below_poverty_level',
                '25_to_34_below_poverty_level',
            ],
            "35_to_64_total": [
                '35_to_44_below_poverty_level',
                '45_to_54_below_poverty_level',
                '55_to_64_below_poverty_level',
                '35_to_44_at_or_above_poverty_level',
                '45_to_54_at_or_above_poverty_level',
                '55_to_64_at_or_above_poverty_level',
            ],
            "35_to_64_below_poverty_level": [
                '35_to_44_below_poverty_level',
                '45_to_54_below_poverty_level',
                '55_to_64_below_poverty_level',
            ],
            "65_and_over_total": [
                '65_to_74_below_poverty_level',
                '75_and_over_below_poverty_level',
                '65_to_74_at_or_above_poverty_level',
                '75_and_over_at_or_above_poverty_level',
            ],
            "65_and_over_below_poverty_level": [
                '65_to_74_below_poverty_level',
                '75_and_over_below_poverty_level',
            ],
            # FIX: these two roll-ups previously listed the 12-17 age
            # groups (duplicating "12_to_17_*" below); they now cover the
            # under-5, 5 and 6-11 groups as the "0_to_11" name implies.
            "0_to_11_total": [
                'under_5_below_poverty_level',
                '5_below_poverty_level',
                '6_to_11_below_poverty_level',
                'under_5_at_or_above_poverty_level',
                '5_at_or_above_poverty_level',
                '6_to_11_at_or_above_poverty_level',
            ],
            "0_to_11_below_poverty_level": [
                'under_5_below_poverty_level',
                '5_below_poverty_level',
                '6_to_11_below_poverty_level',
            ],
            "12_to_17_total": [
                '12_to_14_below_poverty_level',
                '15_below_poverty_level',
                '16_to_17_below_poverty_level',
                '12_to_14_at_or_above_poverty_level',
                '15_at_or_above_poverty_level',
                '16_to_17_at_or_above_poverty_level',
            ],
            "12_to_17_below_poverty_level": [
                '12_to_14_below_poverty_level',
                '15_below_poverty_level',
                '16_to_17_below_poverty_level',
            ],
            "18_to_64_total": [
                '18_to_24_below_poverty_level',
                '25_to_34_below_poverty_level',
                '35_to_44_below_poverty_level',
                '45_to_54_below_poverty_level',
                '55_to_64_below_poverty_level',
                '18_to_24_at_or_above_poverty_level',
                '25_to_34_at_or_above_poverty_level',
                '35_to_44_at_or_above_poverty_level',
                '45_to_54_at_or_above_poverty_level',
                '55_to_64_at_or_above_poverty_level',
            ],
            "18_to_64_below_poverty_level": [
                '18_to_24_below_poverty_level',
                '25_to_34_below_poverty_level',
                '35_to_44_below_poverty_level',
                '45_to_54_below_poverty_level',
                '55_to_64_below_poverty_level',
            ]
        })
        for groupset, group_list in groupsets.items():
            df[f'total_{groupset}'] = df[[f'total_{f}' for f in group_list]].sum(axis=1)
            df[f'male_{groupset}'] = df[[f'male_{f}' for f in group_list]].sum(axis=1)
            df[f'female_{groupset}'] = df[[f'female_{f}' for f in group_list]].sum(axis=1)

        # Pass it back
        return df
@register
class PovertyLatinoDownloader(PovertyAgeDownloader):
    """The full poverty-by-age table, restricted to the Latino population."""
    PROCESSED_TABLE_NAME = "povertylatino"
    UNIVERSE = "Latino population for whom poverty status is determined"
    RAW_TABLE_NAME = 'B17001I'
@register
class PovertyWhiteDownloader(PovertyAgeDownloader):
    """The full poverty-by-age table, restricted to the white population."""
    PROCESSED_TABLE_NAME = "povertywhite"
    UNIVERSE = "white population for whom poverty status is determined"
    RAW_TABLE_NAME = 'B17001A'
@register
class PovertyBlackDownloader(PovertyAgeDownloader):
    """The full poverty-by-age table, restricted to the Black population."""
    PROCESSED_TABLE_NAME = "povertyblack"
    UNIVERSE = "Black population for whom poverty status is determined"
    RAW_TABLE_NAME = 'B17001B'
@register
class PovertyAsianDownloader(PovertyAgeDownloader):
    """The full poverty-by-age table, restricted to the Asian population."""
    PROCESSED_TABLE_NAME = "povertyasian"
    UNIVERSE = "Asian population for whom poverty status is determined"
    RAW_TABLE_NAME = 'B17001D'
| 41.575439
| 90
| 0.641995
| 1,547
| 11,849
| 4.247576
| 0.124758
| 0.312281
| 0.2613
| 0.170446
| 0.752245
| 0.733678
| 0.654238
| 0.628367
| 0.537361
| 0.508598
| 0
| 0.090941
| 0.269643
| 11,849
| 284
| 91
| 41.721831
| 0.668361
| 0.024559
| 0
| 0.559524
| 0
| 0
| 0.546663
| 0.467848
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003968
| false
| 0
| 0.011905
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d5df431403f64172c72f521ac0b9a6cc95545c67
| 243
|
py
|
Python
|
birdman/__init__.py
|
mortonjt/BIRDMAn
|
2b26ead6fe595deddc7090f58ce0cb06a4767761
|
[
"BSD-3-Clause"
] | null | null | null |
birdman/__init__.py
|
mortonjt/BIRDMAn
|
2b26ead6fe595deddc7090f58ce0cb06a4767761
|
[
"BSD-3-Clause"
] | null | null | null |
birdman/__init__.py
|
mortonjt/BIRDMAn
|
2b26ead6fe595deddc7090f58ce0cb06a4767761
|
[
"BSD-3-Clause"
] | null | null | null |
from .model_base import BaseModel, RegressionModel
from .default_models import NegativeBinomial, NegativeBinomialLME, Multinomial

# Public API of the package: the model base classes plus the bundled
# default model implementations.
__all__ = ["BaseModel", "RegressionModel", "NegativeBinomial",
           "NegativeBinomialLME", "Multinomial"]
| 40.5
| 78
| 0.786008
| 19
| 243
| 9.736842
| 0.631579
| 0.259459
| 0.497297
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123457
| 243
| 5
| 79
| 48.6
| 0.868545
| 0
| 0
| 0
| 0
| 0
| 0.288066
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d5f08114d3dd0bb5444823619dc69dcb662e379e
| 3,496
|
py
|
Python
|
assets/world/tiles_0_0_1.py
|
Elephant34/flatSurvival
|
c5733d199048f16f5bec96ab92096db833779448
|
[
"MIT"
] | null | null | null |
assets/world/tiles_0_0_1.py
|
Elephant34/flatSurvival
|
c5733d199048f16f5bec96ab92096db833779448
|
[
"MIT"
] | 3
|
2021-09-08T02:08:08.000Z
|
2022-03-12T00:33:37.000Z
|
assets/world/tiles_0_0_1.py
|
Elephant34/flatSurvival
|
c5733d199048f16f5bec96ab92096db833779448
|
[
"MIT"
] | null | null | null |
"""Stores the tile sprites for version: 0.0.1
"""
import pathlib
import arcade
class Basic(arcade.Sprite):
    """Creates a base tile sprite

    :param path: path of the tile's texture image file
    :type path: pathlib.Path
    :param center_x: x position
    :type center_x: int
    :param center_y: y position
    :type center_y: int
    :param zone: zone coordinates
    :type zone: str
    """

    def __init__(
            self,
            path: pathlib.Path,
            center_x: int,
            center_y: int,
            zone: str
    ) -> None:
        """Constructor method
        """
        super().__init__(
            path,
            center_x=center_x,
            center_y=center_y
        )

        # NOTE(review): presumably marks tiles the player cannot walk
        # through; solid subclasses set this to True. Confirm against the
        # game's collision code.
        self.player_collides = False
        self.zone = zone
class Unknown(Basic):
    """Creates an unknown tile sprite (solid placeholder texture).

    :param center_x: x position
    :type center_x: int
    :param center_y: y position
    :type center_y: int
    :param zone: zone coordinates
    :type zone: str
    """

    def __init__(self, center_x: int, center_y: int, zone: str) -> None:
        """Constructor method
        """
        texture = pathlib.Path("assets/data/tiles/unknown.png")
        super().__init__(texture, center_x, center_y, zone)
        self.player_collides = True
class Void(Basic):
    """Creates a void tile sprite (solid).

    :param center_x: x position
    :type center_x: int
    :param center_y: y position
    :type center_y: int
    :param zone: zone coordinates
    :type zone: str
    """

    def __init__(self, center_x: int, center_y: int, zone: str) -> None:
        """Constructor method
        """
        texture = pathlib.Path("assets/data/tiles/void.png")
        super().__init__(texture, center_x, center_y, zone)
        self.player_collides = True
class Grass(Basic):
    """Creates a grass tile sprite (walkable).

    :param center_x: x position
    :type center_x: int
    :param center_y: y position
    :type center_y: int
    :param zone: zone coordinates
    :type zone: str
    """

    def __init__(self, center_x: int, center_y: int, zone: str) -> None:
        """Constructor method
        """
        texture = pathlib.Path("assets/data/tiles/grass.png")
        super().__init__(texture, center_x, center_y, zone)
class Stone(Basic):
    """Creates a stone tile sprite (walkable).

    :param center_x: x position
    :type center_x: int
    :param center_y: y position
    :type center_y: int
    :param zone: zone coordinates
    :type zone: str
    """

    def __init__(self, center_x: int, center_y: int, zone: str) -> None:
        """Constructor method
        """
        texture = pathlib.Path("assets/data/tiles/stone.png")
        super().__init__(texture, center_x, center_y, zone)
class Tree(Basic):
    """Creates a tree tile sprite (walkable).

    :param center_x: x position
    :type center_x: int
    :param center_y: y position
    :type center_y: int
    :param zone: zone coordinates
    :type zone: str
    """

    def __init__(self, center_x: int, center_y: int, zone: str) -> None:
        """Constructor method
        """
        texture = pathlib.Path("assets/data/tiles/tree.png")
        super().__init__(texture, center_x, center_y, zone)
class Wall(Basic):
    """Creates a wall tile sprite (solid).

    :param center_x: x position
    :type center_x: int
    :param center_y: y position
    :type center_y: int
    :param zone: zone coordinates
    :type zone: str
    """

    def __init__(self, center_x: int, center_y: int, zone: str) -> None:
        """Constructor method
        """
        texture = pathlib.Path("assets/data/tiles/wall.png")
        super().__init__(texture, center_x, center_y, zone)
        self.player_collides = True
| 21.447853
| 72
| 0.545195
| 418
| 3,496
| 4.277512
| 0.117225
| 0.113535
| 0.14094
| 0.082215
| 0.781879
| 0.781879
| 0.781879
| 0.738255
| 0.738255
| 0.738255
| 0
| 0.001315
| 0.347254
| 3,496
| 162
| 73
| 21.580247
| 0.782209
| 0.33095
| 0
| 0.5
| 0
| 0
| 0.076412
| 0.076412
| 0
| 0
| 0
| 0
| 0
| 1
| 0.102941
| false
| 0
| 0.029412
| 0
| 0.235294
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
9111888169eb67b236e432ec5e89397a7c877e16
| 925
|
py
|
Python
|
lit_script/src/compiler.py
|
MaciejWas/lit-script
|
d4488aed8f09e92a3ef6207f3e6aadc8197fe386
|
[
"MIT"
] | 4
|
2021-10-03T15:22:13.000Z
|
2021-10-04T09:37:11.000Z
|
lit_script/src/compiler.py
|
MaciejWas/lit-script
|
d4488aed8f09e92a3ef6207f3e6aadc8197fe386
|
[
"MIT"
] | null | null | null |
lit_script/src/compiler.py
|
MaciejWas/lit-script
|
d4488aed8f09e92a3ef6207f3e6aadc8197fe386
|
[
"MIT"
] | null | null | null |
from .core import Expression, Atom, Line, Context, inbuilt_functions
Compilable = list[Line]
class CompileLog:
    """Accumulates compilation event messages, one string per event."""

    def __init__(self):
        self.events: list[str] = []

    def log(self, info: str) -> None:
        """Append a single event message to the log."""
        self.events.append(info)

    def __str__(self) -> str:
        return "\n".join(self.events)

    # The debug representation is identical to the printable form;
    # aliasing avoids duplicating the join logic.
    __repr__ = __str__
class Compiler:
    """Drives compilation of lit-script lines against one global context."""

    def __init__(self):
        # Single shared scope used for every resolution/compilation.
        self.global_context = Context()

    def setup_inbuilt(self):
        """Register the language's built-in functions in the global context."""
        self.global_context.add_variables(inbuilt_functions)

    def resolve(self, expr: Expression) -> Atom:
        """Evaluate ``expr`` to an Atom using the global context."""
        return expr.resolve(self.global_context)

    def compile(self, lines: Compilable):
        """Type-check ``lines`` and print a success message when they pass.

        NOTE(review): on failure this *returns* an Exception instance
        instead of raising it, so callers must inspect the return value.
        Confirm this is intentional before relying on it.
        """
        type_check_passed = self.check_types(lines)
        if not type_check_passed:
            return Exception("Type check failed.")
        print("Compiled! :)")

    def check_types(self, lines: Compilable) -> bool:
        """Placeholder type checker; currently accepts everything."""
        return True  # TODO: Impl
| 23.125
| 68
| 0.644324
| 111
| 925
| 5.108108
| 0.432432
| 0.070547
| 0.089947
| 0.05291
| 0.088183
| 0.088183
| 0
| 0
| 0
| 0
| 0
| 0
| 0.245405
| 925
| 39
| 69
| 23.717949
| 0.812321
| 0.010811
| 0
| 0.16
| 0
| 0
| 0.03724
| 0
| 0
| 0
| 0
| 0.025641
| 0
| 1
| 0.36
| false
| 0.08
| 0.04
| 0.16
| 0.68
| 0.04
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
9115720aa80de8e80c86b615ca8ae0241cff4dec
| 54
|
py
|
Python
|
captum/insights/attr_vis/__init__.py
|
Europium248/captum
|
ac02fae2651b8d68a44bcb9d03b91cbb3959f2fc
|
[
"BSD-3-Clause"
] | 7
|
2020-08-26T09:36:54.000Z
|
2021-12-21T15:46:32.000Z
|
captum/insights/attr_vis/__init__.py
|
Europium248/captum
|
ac02fae2651b8d68a44bcb9d03b91cbb3959f2fc
|
[
"BSD-3-Clause"
] | 16
|
2020-08-25T18:44:45.000Z
|
2022-03-25T19:12:36.000Z
|
captum/insights/attr_vis/__init__.py
|
Europium248/captum
|
ac02fae2651b8d68a44bcb9d03b91cbb3959f2fc
|
[
"BSD-3-Clause"
] | 10
|
2020-09-30T19:27:17.000Z
|
2021-04-04T14:50:31.000Z
|
from .app import AttributionVisualizer, Batch # noqa
| 27
| 53
| 0.796296
| 6
| 54
| 7.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 54
| 1
| 54
| 54
| 0.934783
| 0.074074
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9121edb0f7fab56706f60e56fb3c98821fad4996
| 88
|
py
|
Python
|
planning_system/app/blueprints/user/__init__.py
|
jehboyes/planning_system
|
a415f1408ef344732498d2ffb111dfd187b9b50f
|
[
"MIT"
] | null | null | null |
planning_system/app/blueprints/user/__init__.py
|
jehboyes/planning_system
|
a415f1408ef344732498d2ffb111dfd187b9b50f
|
[
"MIT"
] | null | null | null |
planning_system/app/blueprints/user/__init__.py
|
jehboyes/planning_system
|
a415f1408ef344732498d2ffb111dfd187b9b50f
|
[
"MIT"
] | null | null | null |
"""
User management section of the app.
"""
from .bp import user_bp
from . import login
| 14.666667
| 35
| 0.715909
| 14
| 88
| 4.428571
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 88
| 5
| 36
| 17.6
| 0.861111
| 0.397727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e680d4138363e7c71746320f38b220e2e4efa810
| 114
|
py
|
Python
|
python/dermomedic/encoder.py
|
fbrunacci/dermomedic.ai
|
361f1f0a52c457d27e2bc648f0888bdf71cb0f32
|
[
"Apache-2.0"
] | null | null | null |
python/dermomedic/encoder.py
|
fbrunacci/dermomedic.ai
|
361f1f0a52c457d27e2bc648f0888bdf71cb0f32
|
[
"Apache-2.0"
] | null | null | null |
python/dermomedic/encoder.py
|
fbrunacci/dermomedic.ai
|
361f1f0a52c457d27e2bc648f0888bdf71cb0f32
|
[
"Apache-2.0"
] | null | null | null |
from json import JSONEncoder
class DictEncoder(JSONEncoder):
    """JSON encoder that serializes arbitrary objects via their attribute dict."""

    def default(self, o):
        # vars(o) is equivalent to o.__dict__ and raises TypeError for
        # objects without one, matching JSONEncoder's contract.
        return vars(o)
| 19
| 31
| 0.719298
| 14
| 114
| 5.571429
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 114
| 5
| 32
| 22.8
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
e6aa0f6654ae612b3d6e03152a37d40e81cf4608
| 18
|
py
|
Python
|
One Road.py
|
MrAnonymous5635/CSCircles
|
010ac82942c88da357e214ea5462ec378f3667b8
|
[
"MIT"
] | 17
|
2018-09-19T09:44:33.000Z
|
2022-01-17T15:17:11.000Z
|
One Road.py
|
MrAnonymous5635/CSCircles
|
010ac82942c88da357e214ea5462ec378f3667b8
|
[
"MIT"
] | 2
|
2020-02-24T15:28:33.000Z
|
2021-11-16T00:04:52.000Z
|
One Road.py
|
MrAnonymous5635/CSCircles
|
010ac82942c88da357e214ea5462ec378f3667b8
|
[
"MIT"
] | 8
|
2020-02-20T00:02:06.000Z
|
2022-01-06T17:25:51.000Z
|
# CS Circles exercise: a, b and c are pre-defined by the grading
# environment; print the smallest of the three values.
print(min(a,b,c))
| 9
| 17
| 0.611111
| 5
| 18
| 2.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 18
| 1
| 18
| 18
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
e6ae92c90360bed3a73e400477f49b83ba471043
| 50
|
py
|
Python
|
watchdog_node/src/watchdog_node/monitors/__init__.py
|
Jailander/strands_apps
|
5bc380bfb37e5717bc9503506eba82c5d86a4d93
|
[
"MIT"
] | null | null | null |
watchdog_node/src/watchdog_node/monitors/__init__.py
|
Jailander/strands_apps
|
5bc380bfb37e5717bc9503506eba82c5d86a4d93
|
[
"MIT"
] | null | null | null |
watchdog_node/src/watchdog_node/monitors/__init__.py
|
Jailander/strands_apps
|
5bc380bfb37e5717bc9503506eba82c5d86a4d93
|
[
"MIT"
] | null | null | null |
from base_monitor import MonitorType
import topics
| 25
| 36
| 0.9
| 7
| 50
| 6.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 50
| 2
| 37
| 25
| 0.977778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fc0511ee6c6a7c181316be1a94fe6ae3a73be980
| 11,199
|
py
|
Python
|
crypto/little_trick/little_trick.py
|
D0g3-Lab/i-SOON_CTF_2021
|
e6eb2050df686953ea861259797a72a60a960ebf
|
[
"MIT"
] | 2
|
2022-03-03T15:40:14.000Z
|
2022-03-06T02:50:36.000Z
|
crypto/little_trick/little_trick.py
|
D0g3-Lab/i-SOON_CTF_2021
|
e6eb2050df686953ea861259797a72a60a960ebf
|
[
"MIT"
] | null | null | null |
crypto/little_trick/little_trick.py
|
D0g3-Lab/i-SOON_CTF_2021
|
e6eb2050df686953ea861259797a72a60a960ebf
|
[
"MIT"
] | null | null | null |
from Crypto.Util.number import sieve_base, bytes_to_long, getPrime
import random
import gmpy2
import os

# CTF challenge generator: encrypts the flag with RSA using a small
# random prime exponent, then leaks the CRT components dp and dq in two
# obfuscated forms. The flag, dp and dq values below are placeholders --
# presumably redacted before the challenge was published.
flag = b'D0g3{}'
flag = bytes_to_long(flag)
p = getPrime(1024)
q = getPrime(1024)
n = p * q
# Small prime exponent derived from 3 random bytes (e < ~2^24).
e = gmpy2.next_prime(bytes_to_long(os.urandom(3)))
c = gmpy2.powmod(flag,e,n)
print(p)
print(q)
print(c)

# --- Leak 1: dp, masked character-by-character. ---
# Note: with dp == '' (redacted) these loops run zero times.
dp = ''
seeds = []
for i in range(0,len(dp)):
    seeds.append(random.randint(0,99))
print(seeds)
result = []
for j in range(0,len(dp)):
    # Re-seed so each character's 4 random values are reproducible
    # from the published seed list.
    random.seed(seeds[j])
    rands = []
    for k in range(0,4):
        rands.append(random.randint(0,99))
    # (~x|r) & (x|~r) == ~(x ^ r): bitwise XNOR of the character code
    # with one of the four random values.
    result.append((~ord(dp[j])|rands[j%4]) & (ord(dp[j])|~rands[j%4]))
    # Remove the value that was used, then publish the other three as a hint.
    del rands[j%4]
    print(rands)
print(result)

# --- Leak 2: dq, each digit RSA-encrypted under a tiny modulus P*Q
# built from consecutive small primes (trivially factorable). ---
dq = ''
C = []
E = 0x10001
list_p = sieve_base[0:len(dq)]
list_q = sieve_base[len(dq):2*len(dq)]
for l in range(0,len(dq)):
    P = list_p[l]
    Q = list_q[l]
    C.append(pow(int(dq[l]),E,P*Q))
print(C)
#119494148343917708105807117614773529196380452025859574123211538859983094108015678321724495609785332508563534950957367289723559468197440246960403054020452985281797756117166991826626612422135797192886041925043855329391156291955066822268279533978514896151007690729926904044407542983781817530576308669792533266431
#125132685086281666800573404868585424815247082213724647473226016452471461555742194042617318063670311290694310562746442372293133509175379170933514423842462487594186286854028887049828613566072663640036114898823281310177406827049478153958964127866484011400391821374773362883518683538899757137598483532099590137741
#10238271315477488225331712641083290024488811710093033734535910573493409567056934528110845049143193836706122210303055466145819256893293429223389828252657426030118534127684265261192503406287408932832340938343447997791634435068366383965928991637536875223511277583685579314781547648602666391656306703321971680803977982711407979248979910513665732355859523500729534069909408292024381225192240385351325999798206366949106362537376452662264512012770586451783712626665065161704126536742755054830427864982782030834837388544811172279496657776884209756069056812750476669508640817369423238496930357725842768918791347095504283368032
#[3, 0, 39, 78, 14, 49, 73, 83, 55, 48, 30, 28, 23, 16, 54, 23, 68, 7, 20, 8, 98, 68, 45, 36, 97, 13, 83, 68, 16, 59, 81, 26, 51, 45, 36, 60, 36, 94, 58, 11, 19, 33, 95, 12, 60, 38, 51, 95, 21, 3, 38, 72, 47, 80, 7, 20, 26, 80, 18, 43, 92, 4, 64, 93, 91, 12, 86, 63, 46, 73, 89, 5, 91, 17, 88, 94, 80, 42, 90, 14, 45, 53, 91, 16, 28, 81, 62, 63, 66, 20, 81, 3, 43, 99, 54, 22, 2, 27, 2, 62, 88, 99, 78, 25, 76, 49, 28, 96, 95, 57, 94, 53, 32, 58, 32, 72, 89, 15, 4, 78, 89, 74, 86, 45, 51, 65, 13, 75, 95, 42, 20, 77, 34, 66, 56, 20, 26, 18, 28, 11, 88, 62, 72, 27, 74, 42, 63, 76, 82, 97, 75, 92, 1, 5, 20, 78, 46, 85, 81, 54, 64, 87, 37, 91, 38, 39, 1, 90, 61, 28, 13, 60, 37, 90, 87, 15, 78, 91, 99, 58, 62, 73, 70, 56, 82, 5, 19, 54, 76, 88, 4, 3, 55, 3, 3, 22, 85, 67, 98, 28, 32, 42, 48, 96, 69, 3, 83, 48, 26, 20, 45, 16, 45, 47, 92, 0, 54, 4, 73, 8, 31, 38, 3, 10, 84, 60, 59, 69, 64, 91, 98, 73, 81, 98, 9, 70, 44, 44, 24, 95, 83, 49, 31, 19, 89, 18, 20, 78, 86, 95, 83, 23, 42, 51, 95, 80, 48, 46, 88, 7, 47, 64, 55, 4, 62, 37, 71, 75, 98, 67, 98, 58, 66, 70, 24, 58, 56, 44, 11, 78, 1, 78, 89, 97, 83, 72, 98, 12, 41, 33, 14, 40, 27, 5, 18, 35, 25, 31, 69, 97, 84, 47, 25, 90, 78, 15, 72, 71]
#[54, 36, 60] [84, 42, 25] [20, 38, 39] [81, 9, 92] [70, 65, 94] [6, 11, 75] [27, 50, 46] [49, 85, 8] [95, 14, 73] [54, 71, 30] [53, 28, 65] [11, 13, 59] [94, 89, 8] [36, 41, 44] [91, 13, 48] [92, 94, 89] [94, 74, 90] [32, 65, 7] [90, 68, 90] [22, 96, 12] [83, 35, 5] [74, 74, 90] [27, 48, 33] [32, 98, 95] [80, 37, 84] [25, 68, 84] [49, 85, 37] [74, 94, 74] [48, 41, 44] [22, 94, 2] [50, 45, 38] [74, 20, 20] [50, 16, 82] [27, 8, 33] [32, 98, 91] [30, 57, 26] [98, 95, 91] [54, 28, 43] [58, 20, 94] [45, 55, 92] [78, 52, 51] [57, 81, 27] [76, 51, 53] [47, 65, 66] [57, 26, 80] [63, 72, 6] [24, 50, 82] [76, 51, 99] [68, 63, 47] [23, 36, 60] [63, 42, 6] [7, 59, 98] [43, 45, 34] [27, 70, 95] [32, 15, 7] [90, 68, 76] [20, 20, 60] [27, 70, 95] [18, 66, 19] [3, 69, 14] [56, 55, 58] [23, 39, 15] [47, 63, 92] [91, 49, 56] [17, 68, 16] [47, 66, 14] [79, 3, 31] [44, 29, 90] [39, 58, 85] [27, 56, 46] [8, 60, 14] [62, 74, 79] [17, 68, 16] [52, 96, 28] [39, 18, 62] [54, 12, 28] [54, 70, 95] [63, 27, 22] [20, 9, 58] [10, 70, 65] [48, 8, 33] [61, 45, 71] [8, 17, 16] [36, 48, 41] [13, 59, 17] [50, 55, 38] [92, 17, 23] [44, 29, 90] [43, 24, 44] [90, 76, 90] [50, 45, 38] [23, 54, 36] [69, 14, 46] [40, 17, 24] [91, 13, 48] [95, 14, 2] [94, 5, 8] [64, 95, 19] [95, 94, 8] [92, 17, 97] [18, 90, 62] [40, 17, 24] [81, 9, 73] [37, 92, 84] [95, 20, 29] [6, 11, 75] [11, 13, 17] [37, 90, 39] [51, 99, 53] [4, 1, 51] [54, 12, 43] [61, 89, 45] [21, 30, 90] [58, 64, 94] [7, 21, 90] [7, 59, 98] [60, 99, 14] [96, 73, 15] [23, 10, 15] [81, 9, 92] [60, 99, 14] [85, 11, 12] [79, 3, 31] [27, 48, 8] [50, 16, 82] [41, 84, 44] [25, 68, 84] [45, 43, 4] [51, 99, 53] [63, 27, 22] [90, 68, 90] [79, 32, 24] [58, 84, 89] [7, 24, 44] [96, 55, 52] [90, 68, 76] [20, 20, 60] [18, 33, 19] [11, 13, 17] [45, 55, 92] [18, 90, 62] [92, 97, 23] [7, 59, 34] [64, 70, 95] [51, 11, 12] [63, 27, 22] [44, 29, 48] [37, 95, 20] [48, 50, 96] [19, 37, 84] [45, 43, 76] [42, 56, 55] [84, 76, 25] [62, 79, 94] [90, 68, 90] [81, 9, 92] [39, 
58, 85] [19, 10, 90] [50, 45, 38] [91, 13, 55] [63, 40, 92] [14, 83, 54] [68, 9, 84] [8, 17, 68] [42, 72, 6] [20, 19, 39] [13, 84, 25] [20, 9, 65] [55, 80, 32] [11, 59, 17] [25, 68, 84] [30, 57, 26] [9, 61, 84] [20, 65, 58] [14, 18, 54] [96, 1, 73] [9, 92, 73] [8, 68, 16] [40, 20, 24] [58, 20, 64] [17, 97, 23] [27, 56, 46] [90, 29, 13] [96, 55, 47] [48, 50, 96] [62, 79, 94] [67, 78, 51] [91, 13, 55] [95, 20, 29] [39, 90, 62] [23, 10, 15] [23, 54, 36] [95, 14, 73] [23, 36, 60] [23, 54, 60] [95, 14, 2] [61, 10, 90] [7, 97, 41] [35, 83, 5] [11, 13, 59] [21, 30, 90] [63, 27, 22] [54, 13, 30] [37, 90, 39] [9, 16, 60] [23, 36, 60] [49, 85, 37] [54, 13, 71] [20, 20, 60] [90, 76, 90] [27, 48, 33] [36, 48, 41] [48, 8, 33] [35, 45, 34] [42, 56, 58] [84, 75, 42] [13, 55, 48] [23, 39, 15] [27, 50, 46] [22, 96, 12] [11, 39, 68] [63, 72, 6] [23, 54, 60] [57, 42, 57] [91, 3, 0] [30, 26, 80] [22, 93, 2] [68, 9, 16] [63, 40, 92] [8, 68, 16] [35, 83, 5] [27, 50, 56] [45, 55, 38] [35, 35, 5] [46, 37, 86] [90, 29, 45] [54, 86, 17] [40, 86, 17] [71, 83, 99] [76, 51, 99] [85, 8, 37] [6, 11, 75] [1, 11, 68] [67, 78, 52] [60, 99, 14] [18, 33, 19] [90, 68, 90] [81, 9, 92] [3, 83, 31] [76, 99, 53] [49, 85, 37] [92, 94, 89] [2, 27, 22] [24, 16, 82] [76, 51, 53] [27, 54, 70] [13, 71, 30] [88, 58, 85] [39, 18, 62] [32, 15, 65] [43, 45, 34] [47, 40, 92] [9, 95, 73] [23, 10, 39] [17, 97, 23] [68, 61, 84] [32, 62, 98] [45, 43, 4] [83, 35, 5] [7, 97, 41] [35, 83, 5] [58, 20, 64] [43, 24, 44] [90, 45, 13] [71, 83, 99] [58, 20, 64] [55, 47, 52] [40, 86, 17] [45, 55, 46] [81, 9, 92] [84, 76, 25] [81, 92, 73] [8, 60, 14] [19, 80, 37] [85, 8, 37] [7, 98, 34] [35, 83, 5] [47, 65, 66] [23, 16, 91] [57, 81, 27] [10, 70, 94] [45, 87, 3] [70, 95, 19] [62, 79, 94] [18, 66, 19] [54, 75, 74] [92, 84, 21] [1, 39, 68] [68, 9, 60] [19, 80, 37] [91, 3, 0] [35, 45, 34] [37, 92, 21] [20, 9, 65] [9, 92, 73] [96, 73, 15] [7, 59, 34] [32, 62, 0]
#[-38, -121, -40, -125, -51, -29, -2, -21, -59, -54, -51, -40, -105, -5, -4, -50, -127, -56, -124, -128, -23, -104, -63, -112, -34, -115, -58, -99, -24, -102, -1, -5, -34, -3, -104, -103, -21, -62, -121, -24, -115, -9, -87, -56, -39, -30, -34, -4, -33, -5, -114, -21, -19, -7, -119, -107, -115, -6, -25, -27, -32, -62, -28, -20, -60, -121, -102, -10, -112, -7, -85, -110, -62, -100, -110, -29, -41, -55, -113, -112, -45, -106, -125, -25, -57, -27, -83, -2, -51, -118, -2, -10, -50, -40, -1, -82, -111, -113, -50, -48, -23, -33, -112, -38, -29, -26, -4, -40, -123, -4, -44, -120, -63, -38, -41, -22, -50, -50, -17, -122, -61, -5, -100, -22, -44, -47, -125, -125, -127, -55, -117, -100, -2, -26, -32, -111, -123, -118, -16, -24, -20, -40, -92, -40, -102, -49, -99, -45, -59, -98, -49, -13, -62, -128, -121, -114, -112, -13, -3, -4, -26, -35, -15, -35, -8, -18, -125, -14, -6, -60, -113, -104, -120, -64, -104, -55, -104, -41, -34, -106, -105, -2, -28, -14, -58, -128, -3, -1, -17, -38, -18, -12, -59, -4, -19, -82, -40, -122, -18, -42, -53, -60, -113, -40, -126, -15, -63, -40, -124, -114, -58, -26, -35, -26, -8, -48, -112, -52, -11, -117, -52, -32, -21, -38, -124, -13, -103, -6, -30, -33, -28, -31, -1, -97, -59, -64, -28, -1, -40, -2, -10, -26, -24, -3, -50, -113, -125, -122, -124, -5, -50, -62, -11, -8, -88, -109, -7, -31, -105, -54, -28, -8, -62, -58, -101, -58, -53, -124, -18, -124, -17, -109, -52, -45, -40, -109, -85, -7, -108, -121, -58, -49, -91, -102, -8, -10, -17, -55, -19, -11, -116, -47, -120, -121, -23, -99, -19, -51, -36, -110, -126, -29, -110, -9, -97, -54, -83, -86]
#[1, 0, 7789, 1, 17598, 20447, 15475, 23040, 41318, 23644, 53369, 19347, 66418, 5457, 0, 1, 14865, 97631, 6459, 36284, 79023, 1, 157348, 44667, 185701, 116445, 23809, 220877, 0, 1, 222082, 30333, 55446, 207442, 193806, 149389, 173229, 349031, 152205, 1, 149157, 196626, 1, 222532, 10255, 46268, 171536, 0, 351788, 152678, 0, 172225, 109296, 0, 579280, 634746, 1, 668942, 157973, 1, 17884, 662728, 759841, 450490, 0, 139520, 157015, 616114, 199878, 154091, 1, 937462, 675736, 53200, 495985, 307528, 1, 804492, 790322, 463560, 520991, 436782, 762888, 267227, 306436, 1051437, 384380, 505106, 729384, 1261978, 668266, 1258657, 913103, 935600, 1, 1, 401793, 769612, 484861, 1024896, 517254, 638872, 1139995, 700201, 308216, 333502, 0, 0, 401082, 1514640, 667345, 1015119, 636720, 1011683, 795560, 783924, 1269039, 5333, 0, 368271, 1700344, 1, 383167, 7540, 1490472, 1484752, 918665, 312560, 688665, 967404, 922857, 624126, 889856, 1, 848912, 1426397, 1291770, 1669069, 0, 1709762, 130116, 1711413, 1336912, 2080992, 820169, 903313, 515984, 2211283, 684372, 2773063, 391284, 1934269, 107761, 885543, 0, 2551314, 2229565, 1392777, 616280, 1368347, 154512, 1, 1668051, 0, 2453671, 2240909, 2661062, 2880183, 1376799, 0, 2252003, 1, 17666, 1, 2563626, 251045, 1593956, 2215158, 0, 93160, 0, 2463412, 654734, 1, 3341062, 3704395, 3841103, 609968, 2297131, 1942751, 3671207, 1, 1209611, 3163864, 3054774, 1055188, 1, 4284662, 3647599, 247779, 0, 176021, 3478840, 783050, 4613736, 2422927, 280158, 2473573, 2218037, 936624, 2118304, 353989, 3466709, 4737392, 2637048, 4570953, 1473551, 0, 0, 4780148, 3299784, 592717, 538363, 2068893, 814922, 2183138, 2011758, 2296545, 5075424, 1814196, 974225, 669506, 2756080, 5729359, 4599677, 5737886, 3947814, 4852062, 1571349, 4123825, 2319244, 4260764, 1266852, 1, 3739921, 1, 5948390, 1, 2761119, 2203699, 1664472, 3182598, 6269365, 5344900, 454610, 495499, 6407607, 1, 1, 476694, 4339987, 5642199, 1131185, 4092110, 2802555, 0, 5323448, 1103156, 2954018, 1, 1860057, 
128891, 2586833, 6636077, 3136169, 1, 3280730, 6970001, 1874791, 48335, 6229468, 6384918, 5412112, 1, 7231540, 7886316, 2501899, 8047283, 2971582, 354078, 401999, 6427168, 4839680, 1, 44050, 3319427, 0, 1, 1452967, 4620879, 5525420, 5295860, 643415, 5594621, 951449, 1996797, 2561796, 6707895, 7072739]
| 196.473684
| 3,924
| 0.579248
| 2,018
| 11,199
| 3.207631
| 0.217542
| 0.003244
| 0.003862
| 0.005098
| 0.025027
| 0.014213
| 0
| 0
| 0
| 0
| 0
| 0.653564
| 0.184481
| 11,199
| 56
| 3,925
| 199.982143
| 0.055185
| 0.914903
| 0
| 0
| 0
| 0
| 0.006742
| 0
| 0
| 0
| 0.007865
| 0
| 0
| 1
| 0
| false
| 0
| 0.102564
| 0
| 0.102564
| 0.179487
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fc110e2850b3632d0a46090bf0723ad016df9e7e
| 284
|
py
|
Python
|
src/py/test/unsupported_features.py
|
powderflask/gls-poetry
|
c55d650854957ac7a90ce8f133171c4da240b4f6
|
[
"MIT"
] | 1
|
2020-08-05T16:06:26.000Z
|
2020-08-05T16:06:26.000Z
|
src/py/test/unsupported_features.py
|
powderflask/gls-poetry
|
c55d650854957ac7a90ce8f133171c4da240b4f6
|
[
"MIT"
] | null | null | null |
src/py/test/unsupported_features.py
|
powderflask/gls-poetry
|
c55d650854957ac7a90ce8f133171c4da240b4f6
|
[
"MIT"
] | 1
|
2020-08-05T16:09:31.000Z
|
2020-08-05T16:09:31.000Z
|
import random
def run (autoTester):
    """Exercise PEP 448 iterable-unpacking spreads against autoTester.

    Checks *-unpacking of a tuple-returning function inside a tuple
    display, and of a list-returning function inside a list display.
    """
    def f():
        # Tuple source for the spread.
        return 3, 4

    autoTester.check ((1, 2, *f()))

    def g():
        # List source for the spread.
        return [3, 4]

    # Bug fix: the original spread f() again here, leaving g unused; the
    # list case was clearly meant to unpack the list-returning g().  The
    # checked value is unchanged since f() and g() yield the same elements.
    autoTester.check ([1, 2, *g()])
    # doesn't compile
    # d1 = {'a':1, 'b':2}
    # d2 = {'c':3, **d1}
    # autoTester.check(str(d2))
| 17.75
| 35
| 0.46831
| 41
| 284
| 3.243902
| 0.536585
| 0.338346
| 0.120301
| 0.270677
| 0.390977
| 0.390977
| 0.390977
| 0.390977
| 0
| 0
| 0
| 0.076923
| 0.31338
| 284
| 15
| 36
| 18.933333
| 0.605128
| 0.28169
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375
| false
| 0
| 0.125
| 0.25
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
fc3882cc45a6890a805151e0d885de8507dff981
| 20,271
|
py
|
Python
|
converter.py
|
cascino/np2vox
|
cc2047aca8d0c3d49e11033aa2f54a19aa442213
|
[
"MIT"
] | null | null | null |
converter.py
|
cascino/np2vox
|
cc2047aca8d0c3d49e11033aa2f54a19aa442213
|
[
"MIT"
] | null | null | null |
converter.py
|
cascino/np2vox
|
cc2047aca8d0c3d49e11033aa2f54a19aa442213
|
[
"MIT"
] | null | null | null |
import webbrowser
import json
import numpy as np
from jinja2 import Template
class MeshConverter():
def __init__(self):
pass
def scale_verts(self, x, y, z, face_type):
    """Return the four corner vertices of one face of the voxel at (x, y, z).

    The grid coordinate is scaled by 2 (each cube spans 2 units) and
    `face_type` selects one of the six cube faces.  An unrecognized
    face_type yields an empty list, matching the original elif-chain
    fallback.

    Improvement: the six near-identical elif branches are collapsed into
    one offset table; the emitted vertices are byte-for-byte the same.
    """
    # Four corner offsets of each 2x2 face, in the original emission order.
    # The constant coordinate identifies the face plane:
    # 0: z=2, 1: y=0, 2: z=0, 3: y=2, 4: x=2, 5: x=0.
    face_offsets = {
        0: ((0.0, 2.0, 2.0), (0.0, 0.0, 2.0), (2.0, 2.0, 2.0), (2.0, 0.0, 2.0)),
        1: ((0.0, 0.0, 0.0), (2.0, 0.0, 0.0), (0.0, 0.0, 2.0), (2.0, 0.0, 2.0)),
        2: ((0.0, 2.0, 0.0), (0.0, 0.0, 0.0), (2.0, 2.0, 0.0), (2.0, 0.0, 0.0)),
        3: ((0.0, 2.0, 0.0), (2.0, 2.0, 0.0), (0.0, 2.0, 2.0), (2.0, 2.0, 2.0)),
        4: ((2.0, 2.0, 0.0), (2.0, 0.0, 0.0), (2.0, 2.0, 2.0), (2.0, 0.0, 2.0)),
        5: ((0.0, 2.0, 0.0), (0.0, 0.0, 0.0), (0.0, 2.0, 2.0), (0.0, 0.0, 2.0)),
    }
    x *= 2
    y *= 2
    z *= 2
    offsets = face_offsets.get(face_type, ())
    return [(dx + x, dy + y, dz + z) for dx, dy, dz in offsets]
def scale_faces(self, scale, face_type):
    """Return the quad of vertex indices for one emitted face.

    Every face contributes four vertices, so the face's index row is its
    winding pattern shifted by 4 * scale.  The result is a plain nested
    list containing a single 4-element row.
    """
    # Winding order per face type, shape (6, 1, 4); two mirror-image
    # orderings cover the six faces.
    winding_table = np.array([[[0, 1, 3, 2]],
                              [[0, 1, 3, 2]],
                              [[1, 0, 2, 3]],
                              [[1, 0, 2, 3]],
                              [[1, 0, 2, 3]],
                              [[0, 1, 3, 2]]])
    return (winding_table[face_type] + 4 * scale).tolist()
def np2vox(self, bin_array):
    '''Convert binary numpy ndarray to indexed 3D mesh data.

    Walks every cell of the (x, y, z) volume; for each occupied voxel
    (value 1) it emits one quad per exposed side, i.e. per 6-neighbour
    that is 0.  Returns (verts, faces): verts is a flat list of vertex
    tuples (four per face, from scale_verts) and faces a list of index
    quads (from scale_faces).

    NOTE: the border planes of bin_array are zeroed in place, so the
    caller's array is modified.
    '''
    print('--> BUILDING MESH')
    print('--> VOXEL VOLUME:', np.count_nonzero(bin_array))
    # Mirror the volume along the first two axes before meshing.
    bin_array = np.fliplr(np.flipud(bin_array))
    verts = []
    faces = []
    x, y, z = bin_array.shape[0], bin_array.shape[1], bin_array.shape[2]
    x_max, y_max, z_max = x - 1, y - 1, z - 1
    count = 0  # faces emitted so far; offsets each quad's vertex indices
    # Clear all six boundary planes so hull voxels never produce geometry
    # and the clamped neighbour lookups below stay meaningful.
    bin_array[0, :, :] = 0
    bin_array[:, 0, :] = 0
    bin_array[:, :, 0] = 0
    bin_array[x_max, :, :] = 0
    bin_array[:, y_max, :] = 0
    bin_array[:, :, z_max] = 0
    for i in range(x):
        for j in range(y):
            for k in range(z):
                current = bin_array[i,j,k]
                # Clamp each index one cell away from the border so the
                # i±1 / j±1 / k±1 reads below stay in bounds.
                # NOTE(review): rebinding the loop variables here persists
                # for the remaining iterations of the inner loops (e.g.
                # once i is clamped 0 -> 1, `current` above reads row 1
                # for the rest of this outer pass) — confirm this is the
                # intended behavior and not a source of duplicate faces.
                if i == x_max:
                    i = x_max - 1
                elif i == 0:
                    i = 1
                else:
                    pass
                if j == y_max:
                    j = y_max - 1
                elif j == 0:
                    j = 1
                else:
                    pass
                if k == z_max:
                    k = z_max - 1
                elif k == 0:
                    k = 1
                else:
                    pass
                # 6-neighbourhood occupancy, indexed in the same face
                # order that scale_verts/scale_faces expect (num == face_type).
                surrounding = np.array([bin_array[i,j+1,k],
                                        bin_array[i-1,j,k],
                                        bin_array[i,j-1,k],
                                        bin_array[i+1,j,k],
                                        bin_array[i,j,k+1],
                                        bin_array[i,j,k-1]], dtype=int)
                if current == 1:
                    for num in range(len(surrounding)):
                        if surrounding[num] == 0:
                            # Note the (k, i, j) axis reordering when
                            # emitting vertex positions.
                            verts += self.scale_verts(k, i, j, num)
                            faces += self.scale_faces(count, num)
                            count += 1
    return verts, faces
def render_voxels(self, voxels):
    '''Render np array in the browser as a mesh using np2vox func and three.js lib.

    Builds the indexed mesh with np2vox, embeds it as JSON in a three.js
    HTML page, writes the page to ./templates/template.html and opens it
    in a new browser tab.
    '''
    verts, faces = self.np2vox(voxels)
    mesh_data = {'verts': verts, 'faces': faces}
    json_mesh = json.dumps(mesh_data)
    # Bug fix in the embedded scene setup: the blue point light (light4)
    # was never positioned because light2.position was set twice.
    html = Template('''<html>
<head>
    <title>Viewer</title>
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
    <link rel="stylesheet" type="text/css" href="css/styles.css"/>
</head>
<body>
    <canvas id="canvas"></canvas>
    <div id="top_panel"></div>
    <div id="bottom_panel">
    </div>
    <script src="js/three.min.js"></script>
    <script src="js/OrbitControls.js"></script>
    <script>
    var renderer = new THREE.WebGLRenderer({canvas: document.getElementById('canvas'), antialias: true});
    var camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 0.1, 10000);
    var scene = new THREE.Scene();
    //renderer
    renderer.setClearColor(0x37373B);
    renderer.setSize(window.innerWidth, window.innerHeight);
    renderer.setPixelRatio(window.devicePixelRatio);
    document.body.appendChild(renderer.domElement);
    //scene and camera setup
    camera.position.set(-200, 200, -200);
    camera.up = new THREE.Vector3(0, 1, 0);
    camera.lookAt(new THREE.Vector3(0, 0, 0))
    scene.add(camera);
    //controls
    var controls = new THREE.OrbitControls(camera, renderer.domElement);
    controls.target.set( 0, 0, 0);
    controls.update();
    //controls.minDistance = 150;
    controls.maxDistance = 2000;
    controls.zoomSpeed = 0.5;
    controls.enablePan = true;
    controls.rotateSpeed = 0.5;
    //lights
    var light1 = new THREE.AmbientLight(0xffffff, 1.0);
    scene.add(light1);
    var light2 = new THREE.PointLight(0xff3333, 0.75);
    light2.position.set(100, 200, 500);
    scene.add(light2)
    var light4 = new THREE.PointLight(0x3333ff, 0.75);
    light4.position.set(-100, 200, -500);
    scene.add(light4)
    var light3 = new THREE.SpotLight(0xddddff, 1);
    light3.position.set(-300, -300, 300);
    scene.add(light3);
    window.addEventListener('resize',windowResize, false);
    function windowResize(){
        camera.aspect = window.innerWidth / window.innerHeight;
        camera.updateProjectionMatrix();
        renderer.setSize( window.innerWidth, window.innerHeight);
    }
    function render() {
        requestAnimationFrame(render);
        renderer.render(scene, camera);
    }
    var mesh_data = JSON.parse(JSON.stringify({{ data }}));
    console.log(mesh_data)
    var verts = mesh_data.verts;
    var faces = mesh_data.faces;
    var geometry = new THREE.Geometry();
    for (i=0; i < verts.length; i++){
        geometry.vertices.push(new THREE.Vector3(verts[i][1], verts[i][2], verts[i][0]));
    }
    for (i=0; i < faces.length; i++){
        geometry.faces.push(new THREE.Face3(faces[i][0], faces[i][1], faces[i][2]));
        geometry.faces.push(new THREE.Face3(faces[i][2], faces[i][3], faces[i][0]));
    }
    console.log('computing normals');
    geometry.computeBoundingSphere();
    geometry.computeFaceNormals();
    geometry.computeVertexNormals();
    console.log('building scene');
    var material = new THREE.MeshLambertMaterial({
        color: 0x616c72,
    });
    var material1 = new THREE.MeshLambertMaterial({
        color: 0x000000,
        wireframe: true,
        transparent: true,
        opacity: 0.7,
    });
    var voxmesh = new THREE.Mesh(geometry, material);
    var voxmeshW = new THREE.Mesh(geometry, material1);
    var geo = new THREE.PlaneGeometry(200, 200);
    var mat = new THREE.MeshLambertMaterial({
        color: 0xe0e3e5,
        wireframe: false,
        transparent: true,
        opacity: 0.7,
        side: THREE.DoubleSide
    });
    var plane = new THREE.Mesh(geo, mat);
    plane.rotateX(Math.PI / 2);
    scene.add(plane);
    scene.add(voxmesh);
    // scene.add(voxmeshW);
    var x = {{x}};
    var y = {{y}};
    var z = {{z}};
    voxmesh.position.set(x, y, z);
    //voxmeshW.position.set(x, y, z);
    var cube_geo = new THREE.BoxGeometry(5, 5, 5);
    var cube_mat = new THREE.MeshLambertMaterial({ color: 0x442222});
    var cube = new THREE.Mesh(cube_geo, cube_mat);
    scene.add(cube);
    cube.position.set(-102.5, 0, -102.5);
    render();
    </script>
</body>
</html>''')
    # Center the mesh: offset chosen from the volume's shape.
    new_html = html.render(data=json_mesh, x=-voxels.shape[0], y=voxels.shape[1] / 2, z=-voxels.shape[2])
    path = './templates/template.html'
    with open(path, 'w') as f:
        f.write(new_html)
    webbrowser.open(path, new=2)
def render_voxel_ani(self, vox_list, delay):
    '''Displays list of np arrays in the browser as a mesh using np2vox func and three.js lib.

    Each array in vox_list becomes one hidden mesh; the embedded script
    cycles their visibility every `delay` milliseconds.  The seven scene
    children added before the meshes (camera, four lights, ground plane,
    marker cube) are skipped, so the animation starts at child index 7.
    Writes ./templates/template_ani.html and opens it in a new browser tab.
    '''
    verts_list = []
    faces_list = []
    for vox in vox_list:
        # compute the mesh data for each voxel array
        verts, faces = self.np2vox(vox)
        verts_list.append(verts)
        faces_list.append(faces)
    # json version of list of mesh data
    mesh_data = {'verts': verts_list, 'faces': faces_list}
    json_meshes = json.dumps(mesh_data)
    # Two copy-paste fixes in the embedded script: light4 was never
    # positioned (light2.position was set twice), and each mesh is now
    # named after its list index j (the original reused the exhausted
    # inner loop counter i, so names did not identify the mesh).
    html = Template('''<html>
<head>
    <title>Viewer</title>
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
    <link rel="stylesheet" type="text/css" href="css/styles.css"/>
</head>
<body>
    <canvas id="canvas"></canvas>
    <div id="top_panel"></div>
    <div id="bottom_panel">
    </div>
    <script src="js/three.min.js"></script>
    <script src="js/OrbitControls.js"></script>
    <script>
    var renderer = new THREE.WebGLRenderer({canvas: document.getElementById('canvas'), antialias: true});
    var camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 0.1, 10000);
    var scene = new THREE.Scene();
    renderer.setClearColor(0x37373B);
    renderer.setSize(window.innerWidth, window.innerHeight);
    renderer.setPixelRatio(window.devicePixelRatio);
    document.body.appendChild(renderer.domElement);
    //scene and camera setup
    camera.position.set(-200, 200, -200);
    camera.up = new THREE.Vector3(0, 1, 0);
    camera.lookAt(new THREE.Vector3(0, 0, 0))
    scene.add(camera);
    //controls
    var controls = new THREE.OrbitControls(camera, renderer.domElement);
    controls.target.set( 0, 0, 0);
    controls.update();
    //controls.minDistance = 150;
    controls.maxDistance = 2000;
    controls.zoomSpeed = 0.5;
    controls.enablePan = true;
    controls.rotateSpeed = 0.5;
    //lights
    var light1 = new THREE.AmbientLight(0xffffff, 1.0);
    scene.add(light1);
    var light2 = new THREE.PointLight(0xff3333, 0.75);
    light2.position.set(100, 200, 500);
    scene.add(light2)
    var light4 = new THREE.PointLight(0x3333ff, 0.75);
    light4.position.set(-100, 200, -500);
    scene.add(light4)
    var light3 = new THREE.SpotLight(0xddddff, 1);
    light3.position.set(-300, -300, 300);
    scene.add(light3);
    window.addEventListener('resize',windowResize, false);
    function windowResize(){
        camera.aspect = window.innerWidth / window.innerHeight;
        camera.updateProjectionMatrix();
        renderer.setSize( window.innerWidth, window.innerHeight);
    }
    function render() {
        requestAnimationFrame(render);
        renderer.render(scene, camera);
    }
    var material = new THREE.MeshLambertMaterial({color: 0x616c72});
    var material1 = new THREE.MeshLambertMaterial({
        color: 0x000000,
        wireframe: true,
        transparent: true,
        opacity: 0.7,
    });
    var geo = new THREE.PlaneGeometry(200, 200);
    var mat = new THREE.MeshLambertMaterial({
        color: 0xe0e3e5,
        wireframe: false,
        transparent: true,
        opacity: 0.7,
        side: THREE.DoubleSide
    });
    var plane = new THREE.Mesh(geo, mat);
    plane.rotateX(Math.PI / 2);
    scene.add(plane);
    var x = {{x}};
    var y = {{y}};
    var z = {{z}};
    var cube_geo = new THREE.BoxGeometry(5, 5, 5);
    var cube_mat = new THREE.MeshLambertMaterial({ color: 0x442222});
    var cube = new THREE.Mesh(cube_geo, cube_mat);
    scene.add(cube);
    cube.position.set(-102.5, 0, -102.5);
    // mesh
    var mesh_data = JSON.parse(JSON.stringify({{ mesh_list }}));
    var verts_list = mesh_data.verts;
    var faces_list = mesh_data.faces;
    for (j=0; j < verts_list.length; j++)
    {
        // build each object as a three js mesh
        verts = verts_list[j];
        faces = faces_list[j];
        var geometry = new THREE.Geometry();
        for (i=0; i < verts.length; i++){
            geometry.vertices.push(new THREE.Vector3(verts[i][1], verts[i][2], verts[i][0]));
        }
        for (i=0; i < faces.length; i++){
            geometry.faces.push(new THREE.Face3(faces[i][0], faces[i][1], faces[i][2]));
            geometry.faces.push(new THREE.Face3(faces[i][2], faces[i][3], faces[i][0]));
        }
        geometry.computeBoundingSphere();
        geometry.computeFaceNormals();
        geometry.computeVertexNormals();
        var voxmesh = new THREE.Mesh(geometry, material);
        voxmesh.name = 'mesh' + j.toString();
        voxmesh.visible = false;
        voxmesh.position.set(x, y, z);
        scene.add(voxmesh);
        console.log('built mesh');
    }
    var delay = {{ speed }};
    var i = 7;
    setInterval(function()
    {
        if (i == scene.children.length - 1){
            scene.children[i-1].visible = false;
            i = 7;
        } else {
            if (i != 7){
                scene.children[i-1].visible = false;
            }
            scene.children[i].visible = true;
            i++;
        }
    }, delay);
    render();
    </script>
</body>
</html>''')
    new_html = html.render(mesh_list=json_meshes,
                           x=-vox_list[0].shape[0],
                           y=vox_list[0].shape[1] / 2,
                           z=-vox_list[0].shape[2],
                           speed=delay)
    path = './templates/template_ani.html'
    with open(path, 'w') as f:
        f.write(new_html)
    webbrowser.open(path, new=2)
| 45.247768
| 128
| 0.374328
| 1,836
| 20,271
| 4.075708
| 0.135076
| 0.012562
| 0.004811
| 0.006415
| 0.757183
| 0.746759
| 0.709876
| 0.691033
| 0.677937
| 0.674061
| 0
| 0.059996
| 0.519807
| 20,271
| 448
| 129
| 45.247768
| 0.708753
| 0.014454
| 0
| 0.624665
| 0
| 0.02681
| 0.726057
| 0.147835
| 0
| 0
| 0.007379
| 0
| 0
| 1
| 0.016086
| false
| 0.010724
| 0.010724
| 0
| 0.037534
| 0.005362
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fc40f8586711f193c0ecb3c74d2639a1dddae59e
| 152
|
py
|
Python
|
proglearn/sims/__init__.py
|
KhelmholtzR/ProgLearn
|
f5177c720e53d2f5936272998b94e0746135a3b9
|
[
"MIT"
] | 18
|
2020-10-13T00:43:06.000Z
|
2022-03-28T09:03:52.000Z
|
proglearn/sims/__init__.py
|
KhelmholtzR/ProgLearn
|
f5177c720e53d2f5936272998b94e0746135a3b9
|
[
"MIT"
] | 234
|
2020-10-04T17:19:15.000Z
|
2022-03-17T15:43:57.000Z
|
proglearn/sims/__init__.py
|
KhelmholtzR/ProgLearn
|
f5177c720e53d2f5936272998b94e0746135a3b9
|
[
"MIT"
] | 33
|
2020-06-10T23:12:09.000Z
|
2020-09-28T05:09:44.000Z
|
from .gaussian_sim import generate_gaussian_parity
from .spiral_sim import generate_spirals
__all__ = ["generate_gaussian_parity", "generate_spirals"]
| 30.4
| 58
| 0.848684
| 19
| 152
| 6.157895
| 0.473684
| 0.153846
| 0.290598
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085526
| 152
| 4
| 59
| 38
| 0.841727
| 0
| 0
| 0
| 1
| 0
| 0.263158
| 0.157895
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fc519ef9bec6277eee950d64bb9c3294e0696570
| 80
|
py
|
Python
|
build/lib/vengeance/excel_com/classes/__init__.py
|
michael-ross-ven/vengeance
|
53c6eefba0573936d22a55ba5900744ac701f4b9
|
[
"MIT"
] | 1
|
2020-01-18T18:23:26.000Z
|
2020-01-18T18:23:26.000Z
|
build/lib/vengeance/excel_com/classes/__init__.py
|
michael-ross-ven/vengeance
|
53c6eefba0573936d22a55ba5900744ac701f4b9
|
[
"MIT"
] | null | null | null |
build/lib/vengeance/excel_com/classes/__init__.py
|
michael-ross-ven/vengeance
|
53c6eefba0573936d22a55ba5900744ac701f4b9
|
[
"MIT"
] | null | null | null |
from .excel_levity_cls import excel_levity_cls
__all__ = ['excel_levity_cls']
| 16
| 46
| 0.8125
| 12
| 80
| 4.583333
| 0.5
| 0.6
| 0.763636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1125
| 80
| 4
| 47
| 20
| 0.774648
| 0
| 0
| 0
| 0
| 0
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fc551927de752b3f6d58f0019a430303884b23a3
| 190
|
py
|
Python
|
gamefixes/750920.py
|
manueliglesiasgarcia/protonfixes
|
d676b6bf39f6e4268b4791d3d71c6d74e2127121
|
[
"BSD-2-Clause"
] | 54
|
2019-06-21T22:03:45.000Z
|
2022-03-20T19:24:36.000Z
|
gamefixes/750920.py
|
manueliglesiasgarcia/protonfixes
|
d676b6bf39f6e4268b4791d3d71c6d74e2127121
|
[
"BSD-2-Clause"
] | 21
|
2020-06-13T22:49:18.000Z
|
2022-03-20T08:28:39.000Z
|
gamefixes/750920.py
|
manueliglesiasgarcia/protonfixes
|
d676b6bf39f6e4268b4791d3d71c6d74e2127121
|
[
"BSD-2-Clause"
] | 53
|
2019-09-11T15:23:25.000Z
|
2022-03-20T08:18:49.000Z
|
""" Shadow of the Tomb Raider
"""
#pylint: disable=C0103
from protonfixes import util
def main():
    """ Installs the d3dcompiler_47 DLL via protontricks.

    NOTE(review): the original docstring said "Requires media foundation
    dlls", but the call below installs d3dcompiler_47 — docstring
    corrected to match the code; confirm against the upstream fix.
    """
    util.protontricks('d3dcompiler_47')
| 14.615385
| 39
| 0.673684
| 22
| 190
| 5.772727
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046358
| 0.205263
| 190
| 12
| 40
| 15.833333
| 0.794702
| 0.436842
| 0
| 0
| 0
| 0
| 0.150538
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5d92cf879ff05b949b7a671ad2df7770c00c0259
| 79
|
py
|
Python
|
BICAMSZ/__init__.py
|
Sdniss/BICAMSZ
|
b6d4f1d5fbd515c01ef71e692474e2ad4f47c843
|
[
"MIT"
] | null | null | null |
BICAMSZ/__init__.py
|
Sdniss/BICAMSZ
|
b6d4f1d5fbd515c01ef71e692474e2ad4f47c843
|
[
"MIT"
] | null | null | null |
BICAMSZ/__init__.py
|
Sdniss/BICAMSZ
|
b6d4f1d5fbd515c01ef71e692474e2ad4f47c843
|
[
"MIT"
] | null | null | null |
from .functions import normalization_pipeline, data_check, pipeline_for_pandas
| 39.5
| 78
| 0.886076
| 10
| 79
| 6.6
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075949
| 79
| 1
| 79
| 79
| 0.90411
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5d986b925de6e94bc3eebb59c91ec8728b270fe1
| 176
|
py
|
Python
|
accounts/urls.py
|
doorsas/rustusgid10
|
6f1cfc16328ebb8a54590a327d7153fcfd487cf8
|
[
"MIT"
] | null | null | null |
accounts/urls.py
|
doorsas/rustusgid10
|
6f1cfc16328ebb8a54590a327d7153fcfd487cf8
|
[
"MIT"
] | null | null | null |
accounts/urls.py
|
doorsas/rustusgid10
|
6f1cfc16328ebb8a54590a327d7153fcfd487cf8
|
[
"MIT"
] | null | null | null |
from django.urls import path, include
from .views import HomePageView, AboutPageView,PugosPageView, HomeView, ArticleDetailView, AddPostView,UpdatePostView, DeletePostView
| 25.142857
| 133
| 0.840909
| 17
| 176
| 8.705882
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102273
| 176
| 6
| 134
| 29.333333
| 0.936709
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5da1fac924d1d1be3251f1ac4aca73bf89946b8a
| 4,448
|
py
|
Python
|
sahara_plugin_vanilla/tests/unit/plugins/vanilla/v2_7_5/test_edp_engine.py
|
tellesnobrega/sahara-plugin-vanilla
|
0637de5fdb7102993e272c04aeb7e05e46d7be38
|
[
"Apache-2.0"
] | null | null | null |
sahara_plugin_vanilla/tests/unit/plugins/vanilla/v2_7_5/test_edp_engine.py
|
tellesnobrega/sahara-plugin-vanilla
|
0637de5fdb7102993e272c04aeb7e05e46d7be38
|
[
"Apache-2.0"
] | null | null | null |
sahara_plugin_vanilla/tests/unit/plugins/vanilla/v2_7_5/test_edp_engine.py
|
tellesnobrega/sahara-plugin-vanilla
|
0637de5fdb7102993e272c04aeb7e05e46d7be38
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from sahara.plugins import edp
from sahara_plugin_vanilla.plugins.vanilla.v2_7_5 import edp_engine
from sahara_plugin_vanilla.tests.unit import base as sahara_base
class Vanilla2ConfigHintsTest(sahara_base.SaharaTestCase):
    """Tests for EdpOozieEngine.get_possible_job_config (Vanilla 2.7.5).

    Each test patches either the confighints helper or the hadoop2 base
    engine, then checks both the returned config and the resource file
    (or job type) the patched callable was invoked with.
    """

    @mock.patch(
        'sahara_plugin_vanilla.plugins.vanilla.confighints_helper.'
        'get_possible_hive_config_from',
        return_value={})
    def test_get_possible_job_config_hive(
            self, get_possible_hive_config_from):
        # Hive hints must be read from the bundled hive-default.xml.
        expected_config = {'job_config': {}}
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_HIVE)
        get_possible_hive_config_from.assert_called_once_with(
            'plugins/vanilla/v2_7_5/resources/hive-default.xml')
        self.assertEqual(expected_config, actual_config)

    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.hadoop2.edp_engine.'
                'EdpOozieEngine')
    def test_get_possible_job_config_java(self, BaseVanillaEdpOozieEngine):
        # Java jobs delegate straight to the hadoop2 base engine.
        expected_config = {'job_config': {}}
        BaseVanillaEdpOozieEngine.get_possible_job_config.return_value = (
            expected_config)
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_JAVA)
        (BaseVanillaEdpOozieEngine.get_possible_job_config.
            assert_called_once_with(edp.JOB_TYPE_JAVA))
        self.assertEqual(expected_config, actual_config)

    @mock.patch(
        'sahara_plugin_vanilla.plugins.vanilla.confighints_helper.'
        'get_possible_mapreduce_config_from',
        return_value={})
    def test_get_possible_job_config_mapreduce(
            self, get_possible_mapreduce_config_from):
        # MapReduce hints come from mapred-default.xml.
        expected_config = {'job_config': {}}
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_MAPREDUCE)
        get_possible_mapreduce_config_from.assert_called_once_with(
            'plugins/vanilla/v2_7_5/resources/mapred-default.xml')
        self.assertEqual(expected_config, actual_config)

    @mock.patch(
        'sahara_plugin_vanilla.plugins.vanilla.confighints_helper.'
        'get_possible_mapreduce_config_from',
        return_value={})
    def test_get_possible_job_config_mapreduce_streaming(
            self, get_possible_mapreduce_config_from):
        # Streaming uses the same mapred-default.xml resource.
        expected_config = {'job_config': {}}
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_MAPREDUCE_STREAMING)
        get_possible_mapreduce_config_from.assert_called_once_with(
            'plugins/vanilla/v2_7_5/resources/mapred-default.xml')
        self.assertEqual(expected_config, actual_config)

    @mock.patch(
        'sahara_plugin_vanilla.plugins.vanilla.confighints_helper.'
        'get_possible_pig_config_from',
        return_value={})
    def test_get_possible_job_config_pig(
            self, get_possible_pig_config_from):
        # NOTE(review): pig hints are asserted to be read from
        # mapred-default.xml, not a pig-specific resource — confirm this
        # mirrors the engine's intended behavior.
        expected_config = {'job_config': {}}
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_PIG)
        get_possible_pig_config_from.assert_called_once_with(
            'plugins/vanilla/v2_7_5/resources/mapred-default.xml')
        self.assertEqual(expected_config, actual_config)

    @mock.patch('sahara_plugin_vanilla.plugins.vanilla.hadoop2.edp_engine.'
                'EdpOozieEngine')
    def test_get_possible_job_config_shell(self, BaseVanillaEdpOozieEngine):
        # Shell jobs also delegate to the hadoop2 base engine.
        expected_config = {'job_config': {}}
        BaseVanillaEdpOozieEngine.get_possible_job_config.return_value = (
            expected_config)
        actual_config = edp_engine.EdpOozieEngine.get_possible_job_config(
            edp.JOB_TYPE_SHELL)
        (BaseVanillaEdpOozieEngine.get_possible_job_config.
            assert_called_once_with(edp.JOB_TYPE_SHELL))
        self.assertEqual(expected_config, actual_config)
| 44.929293
| 76
| 0.732014
| 539
| 4,448
| 5.627087
| 0.213358
| 0.10155
| 0.073854
| 0.105506
| 0.778767
| 0.745137
| 0.731619
| 0.731619
| 0.731619
| 0.731619
| 0
| 0.007226
| 0.191097
| 4,448
| 98
| 77
| 45.387755
| 0.835742
| 0.124326
| 0
| 0.653333
| 0
| 0
| 0.195103
| 0.172423
| 0
| 0
| 0
| 0
| 0.16
| 1
| 0.08
| false
| 0
| 0.053333
| 0
| 0.146667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5da562d5cb4133173c624acbcfd6bd52d998473f
| 969
|
py
|
Python
|
tests/outlook/test_folder.py
|
matthewgdv/office
|
8a779cecb9382a196a34a358c43d23a30c48bb04
|
[
"MIT"
] | 1
|
2020-12-26T16:08:42.000Z
|
2020-12-26T16:08:42.000Z
|
tests/outlook/test_folder.py
|
matthewgdv/office
|
8a779cecb9382a196a34a358c43d23a30c48bb04
|
[
"MIT"
] | null | null | null |
tests/outlook/test_folder.py
|
matthewgdv/office
|
8a779cecb9382a196a34a358c43d23a30c48bb04
|
[
"MIT"
] | 1
|
2021-05-30T11:25:20.000Z
|
2021-05-30T11:25:20.000Z
|
# import pytest
class TestMessageFolder:
def test_folders(self): # synced
assert True
def test_messages(self): # synced
assert True
def test_order_messages_by_date(): # synced
assert True
class TestAttributes:
class TestChildFolderCount:
pass
class TestTotalItemCount:
pass
class TestUnreadItemCount:
pass
class TestName:
pass
class TestChildFolders:
pass
class TestMessages:
pass
class TestBulkMessageFolderAction:
def test_move(self): # synced
assert True
def test_delete(self): # synced
assert True
def test_copy(self): # synced
assert True
class TestMessageFolderQuery:
def test___getitem__(self): # synced
assert True
def test_execute(self): # synced
assert True
def test_bulk(self): # synced
assert True
| 17.944444
| 48
| 0.598555
| 93
| 969
| 6.064516
| 0.333333
| 0.111702
| 0.255319
| 0.283688
| 0.287234
| 0.287234
| 0
| 0
| 0
| 0
| 0
| 0
| 0.352941
| 969
| 53
| 49
| 18.283019
| 0.899522
| 0.078431
| 0
| 0.441176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.264706
| 1
| 0.264706
| false
| 0.176471
| 0
| 0
| 0.558824
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
5dbb49ce14155c4603dc1e0f6c6622f904f2103f
| 318
|
py
|
Python
|
api/tests/test_dangling_admin.py
|
ShubhamGG/Anubis
|
2c538ef258a1edf5463596a33bc66caa2ef7e35b
|
[
"MIT"
] | 2
|
2022-02-24T17:39:27.000Z
|
2022-02-25T02:14:06.000Z
|
api/tests/test_dangling_admin.py
|
ShubhamGG/Anubis
|
2c538ef258a1edf5463596a33bc66caa2ef7e35b
|
[
"MIT"
] | null | null | null |
api/tests/test_dangling_admin.py
|
ShubhamGG/Anubis
|
2c538ef258a1edf5463596a33bc66caa2ef7e35b
|
[
"MIT"
] | null | null | null |
from utils import permission_test
def test_dangling_admin():
permission_test('/admin/dangling/list', fail_for=['student', 'ta', 'professor'])
permission_test('/admin/dangling/reset', fail_for=['student', 'ta', 'professor'])
permission_test('/admin/dangling/fix', fail_for=['student', 'ta', 'professor'])
| 39.75
| 85
| 0.710692
| 39
| 318
| 5.564103
| 0.410256
| 0.258065
| 0.262673
| 0.373272
| 0.59447
| 0.479263
| 0.479263
| 0.479263
| 0.479263
| 0
| 0
| 0
| 0.100629
| 318
| 7
| 86
| 45.428571
| 0.758741
| 0
| 0
| 0
| 0
| 0
| 0.358491
| 0.066038
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.2
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5dca870e069c6a0a5d577bc3920a45fd2a1a7e19
| 5,289
|
py
|
Python
|
KNN/kNN.py
|
lance52587/MachineLearningNote
|
0184f8de178990ee31ace2a43809830874313697
|
[
"MIT"
] | 37
|
2018-06-06T05:58:54.000Z
|
2022-02-21T04:49:54.000Z
|
KNN/kNN.py
|
YYangjlu/MachineLearningNote
|
d34a9b57af3b2c6f276d14c2a7a3dccadb585421
|
[
"MIT"
] | null | null | null |
KNN/kNN.py
|
YYangjlu/MachineLearningNote
|
d34a9b57af3b2c6f276d14c2a7a3dccadb585421
|
[
"MIT"
] | 35
|
2018-10-01T16:12:34.000Z
|
2022-02-21T03:35:33.000Z
|
# -*- coding: utf-8 -*-
# @Date : 2017-04-03 15:47:04
# @Author : Alan Lau (rlalan@outlook.com)
# import numpy as np
# import distance
# import fwalker
# import reader
# import statistic
# def get_data(data_path):
# label_vec = []
# files = fwalker.fun(data_path)
# for file in files:
# ech_label_vec = []
# ech_label = int((file.split('\\'))[-1][0])
# ech_vec = ((np.loadtxt(file)).ravel())
# ech_label_vec.append(ech_label)
# ech_label_vec.append(ech_vec)
# label_vec.append(ech_label_vec)
# return label_vec
# def find_label(train_vec_list, vec, k):
# get_label_list = []
# for ech_trainlabel_vec in train_vec_list:
# ech_label_distance = []
# train_label, train_vec = ech_trainlabel_vec[0], ech_trainlabel_vec[1]
# vec_distance = distance.Euclidean(train_vec, vec)
# ech_label_distance.append(train_label)
# ech_label_distance.append(vec_distance)
# get_label_list.append(ech_label_distance)
# result_k = np.array(get_label_list)
# order_distance = (result_k.T)[1].argsort()
# order = np.array((result_k[order_distance].T)[0])
# top_k = np.array(order[:k], dtype=int)
# find_label = statistic.orderdic(statistic.statistic(top_k), True)[0][0]
# return find_label
# def classify(train_vec_list, test_vec_list, k):
# error_counter = 0
# for ech_label_vec in test_vec_list:
# label, vec = ech_label_vec[0], ech_label_vec[1]
# get_label = find_label(train_vec_list, vec, k)
# print('Original label is:'+str(label) +
# ', kNN label is:'+str(get_label))
# if str(label) != str(get_label):
# error_counter += 1
# else:
# continue
# true_probability = str(
# round((1-error_counter/len(test_vec_list))*100, 2))+'%'
# print('Correct probability:'+true_probability)
# def main():
# k = 3
# train_data_path = r'D:\DevelopmentLanguage\Python\MachineLearning\Learning\KNN\lab3_0930\input_digits\trainingDigits'
# test_data_path = r'D:\DevelopmentLanguage\Python\MachineLearning\Learning\KNN\lab3_0930\input_digits\testDigits'
# train_vec_list = get_data(train_data_path)
# test_vec_list = get_data(test_data_path)
# classify(train_vec_list, test_vec_list, k)
# if __name__ == '__main__':
# main()
# -*- coding: utf-8 -*-
# @Date : 2017-04-03 15:47:04
# @Author : Alan Lau (rlalan@outlook.com)
import os
import math
import collections
import numpy as np
def Euclidean(vec1, vec2):
npvec1, npvec2 = np.array(vec1), np.array(vec2)
return math.sqrt(((npvec1 - npvec2)**2).sum())
def fwalker(path):
fileArray = []
for root, dirs, files in os.walk(path):
for fn in files:
eachpath = str(root + '\\' + fn)
fileArray.append(eachpath)
return fileArray
def orderdic(dic, reverse):
ordered_list = sorted(
dic.items(), key=lambda item: item[1], reverse=reverse)
return ordered_list
def get_data(data_path):
label_vec = []
files = fwalker(data_path)
for file in files:
ech_label_vec = []
ech_label = int((file.split('\\'))[-1][0]) # 获取每个向量的标签
ech_vec = ((np.loadtxt(file)).ravel()) # 获取每个文件的向量
ech_label_vec.append(ech_label) # 将一个文件夹的标签和向量放到同一个list内
ech_label_vec.append(
ech_vec
) # 将一个文件夹的标签和向量放到同一个list内,目的是将标签和向量对应起来,类似于字典,这里不直接用字典因为字典的键(key)不可重复。
label_vec.append(ech_label_vec) # 再将所有的标签和向量存入一个list内,构成二维数组
return label_vec
def find_label(train_vec_list, vec, k):
get_label_list = []
for ech_trainlabel_vec in train_vec_list:
ech_label_distance = []
train_label, train_vec = ech_trainlabel_vec[0], ech_trainlabel_vec[1]
vec_distance = Euclidean(train_vec, vec) # 计算距离
ech_label_distance.append(train_label)
ech_label_distance.append(vec_distance) # 将距离和标签对应存入list
get_label_list.append(ech_label_distance)
result_k = np.array(get_label_list)
order_distance = (result_k.T)[1].argsort() # 对距离进行排序
order = np.array((result_k[order_distance].T)[0])
top_k = np.array(order[:k], dtype=int) # 获取前k距离和标签
find_label = orderdic(collections.Counter(top_k),
True)[0][0] # 统计在前k排名中标签出现频次
return find_label
def classify(train_vec_list, test_vec_list, k):
error_counter = 0 #计数器,计算错误率
for ech_label_vec in test_vec_list:
label, vec = ech_label_vec[0], ech_label_vec[1]
get_label = find_label(train_vec_list, vec, k) # 获得学习得到的标签
print('Original label is:' + str(label) + ', kNN label is:' +
str(get_label))
if str(label) != str(get_label):
error_counter += 1
else:
continue
true_probability = str(
round((1 - error_counter / len(test_vec_list)) * 100, 2)) + '%'
print('Correct probability:' + true_probability)
def main():
k = 3
train_data_path = r'..\KNN\lab3_0930\input_digits\trainingDigits'
test_data_path = r'..\KNN\lab3_0930\input_digits\testDigits'
train_vec_list = get_data(train_data_path)
test_vec_list = get_data(test_data_path)
classify(train_vec_list, test_vec_list, k)
if __name__ == '__main__':
main()
| 34.568627
| 123
| 0.650974
| 723
| 5,289
| 4.44675
| 0.181189
| 0.064697
| 0.0479
| 0.031726
| 0.789425
| 0.768274
| 0.707932
| 0.707932
| 0.707932
| 0.684292
| 0
| 0.02338
| 0.223672
| 5,289
| 153
| 124
| 34.568627
| 0.75962
| 0.482889
| 0
| 0
| 0
| 0
| 0.056327
| 0.031543
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.057143
| 0
| 0.228571
| 0.028571
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5dd0b066966e752618b683585541142ec2b6dc9f
| 238
|
py
|
Python
|
garage/tf/envs/__init__.py
|
shadiakiki1986/garage
|
095bb5d25b32df1d44b47e99a78a9b01796941d9
|
[
"MIT"
] | 3
|
2019-08-11T22:26:55.000Z
|
2020-11-28T10:23:50.000Z
|
garage/tf/envs/__init__.py
|
shadiakiki1986/garage
|
095bb5d25b32df1d44b47e99a78a9b01796941d9
|
[
"MIT"
] | null | null | null |
garage/tf/envs/__init__.py
|
shadiakiki1986/garage
|
095bb5d25b32df1d44b47e99a78a9b01796941d9
|
[
"MIT"
] | 2
|
2019-08-11T22:30:14.000Z
|
2021-03-25T02:57:50.000Z
|
from garage.tf.envs.base import TfEnv
from garage.tf.envs.parallel_vec_env_executor import ParallelVecEnvExecutor
from garage.tf.envs.vec_env_executor import VecEnvExecutor
__all__ = ["TfEnv", "ParallelVecEnvExecutor", "VecEnvExecutor"]
| 39.666667
| 75
| 0.840336
| 30
| 238
| 6.366667
| 0.466667
| 0.157068
| 0.188482
| 0.251309
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07563
| 238
| 5
| 76
| 47.6
| 0.868182
| 0
| 0
| 0
| 0
| 0
| 0.172269
| 0.092437
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5dd1af8eac6fe8e15edb39cb5e7679147307da3d
| 96,551
|
py
|
Python
|
tables/tests/test_indexes.py
|
crs4/PyTables
|
02d23d41f714122fd5fd4f7e1063c0b31d1a774b
|
[
"BSD-3-Clause"
] | 1
|
2020-12-27T13:53:00.000Z
|
2020-12-27T13:53:00.000Z
|
tables/tests/test_indexes.py
|
mrgloom/PyTables
|
c30c6f40cd3d5996ee711d5685328085f3569cfc
|
[
"BSD-3-Clause"
] | null | null | null |
tables/tests/test_indexes.py
|
mrgloom/PyTables
|
c30c6f40cd3d5996ee711d5685328085f3569cfc
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import unittest
import os
import tempfile
import copy
from tables import *
from tables.index import Index, default_auto_index, default_index_filters
from tables.idxutils import calc_chunksize
from tables.tests.common import verbose, allequal, heavy, cleanup, \
PyTablesTestCase, TempFileMixin
from tables.exceptions import OldIndexWarning
# To delete the internal attributes automagically
unittest.TestCase.tearDown = cleanup
import numpy
# Sensible parameters for indexing with small blocksizes
minRowIndex = 10
small_blocksizes = (96, 24, 6, 3)
class TDescr(IsDescription):
var1 = StringCol(itemsize=4, dflt=b"", pos=1)
var2 = BoolCol(dflt=0, pos=2)
var3 = IntCol(dflt=0, pos=3)
var4 = FloatCol(dflt=0, pos=4)
class BasicTestCase(PyTablesTestCase):
compress = 0
complib = "zlib"
shuffle = 0
fletcher32 = 0
nrows = minRowIndex
ss = small_blocksizes[2]
def setUp(self):
# Create an instance of an HDF5 Table
self.file = tempfile.mktemp(".h5")
self.fileh = open_file(self.file, "w")
self.rootgroup = self.fileh.root
self.populateFile()
# Close the file
self.fileh.close()
def populateFile(self):
group = self.rootgroup
# Create a table
title = "This is the IndexArray title"
self.filters = Filters(complevel=self.compress,
complib=self.complib,
shuffle=self.shuffle,
fletcher32=self.fletcher32)
table = self.fileh.create_table(group, 'table', TDescr, title,
self.filters, self.nrows)
for i in range(self.nrows):
table.row['var1'] = str(i).encode('ascii')
# table.row['var2'] = i > 2
table.row['var2'] = i % 2
table.row['var3'] = i
table.row['var4'] = float(self.nrows - i - 1)
table.row.append()
table.flush()
# Index all entries:
for col in table.colinstances.itervalues():
indexrows = col.create_index(_blocksizes=small_blocksizes)
if verbose:
print "Number of written rows:", self.nrows
print "Number of indexed rows:", indexrows
return
def tearDown(self):
self.fileh.close()
# print "File %s not removed!" % self.file
os.remove(self.file)
cleanup(self)
#----------------------------------------
def test00_flushLastRow(self):
"""Checking flushing an Index incrementing only the last row."""
if verbose:
print '\n', '-=' * 30
print "Running %s.test00_flushLastRow..." % self.__class__.__name__
# Open the HDF5 file in append mode
self.fileh = open_file(self.file, mode="a")
table = self.fileh.root.table
# Add just 3 rows more
for i in range(3):
table.row['var1'] = str(i).encode('ascii')
table.row.append()
table.flush() # redo the indexes
idxcol = table.cols.var1.index
if verbose:
print "Max rows in buf:", table.nrowsinbuf
print "Number of elements per slice:", idxcol.slicesize
print "Chunk size:", idxcol.sorted.chunksize
print "Elements in last row:", idxcol.indicesLR[-1]
# Do a selection
results = [p["var1"] for p in table.where('var1 == b"1"')]
self.assertEqual(len(results), 2)
self.assertEqual(results, [b'1']*2)
def test00_update(self):
"""Checking automatic re-indexing after an update operation."""
if verbose:
print '\n', '-=' * 30
print "Running %s.test00_update..." % self.__class__.__name__
# Open the HDF5 file in append mode
self.fileh = open_file(self.file, mode="a")
table = self.fileh.root.table
# Modify a couple of columns
for i, row in enumerate(table.where("(var3>1) & (var3<5)")):
row['var1'] = str(i)
row['var3'] = i
row.update()
table.flush() # redo the indexes
idxcol1 = table.cols.var1.index
idxcol3 = table.cols.var3.index
if verbose:
print "Dirtyness of var1 col:", idxcol1.dirty
print "Dirtyness of var3 col:", idxcol3.dirty
self.assertEqual(idxcol1.dirty, False)
self.assertEqual(idxcol3.dirty, False)
# Do a couple of selections
results = [p["var1"] for p in table.where('var1 == b"1"')]
self.assertEqual(len(results), 2)
self.assertEqual(results, [b'1']*2)
results = [p["var3"] for p in table.where('var3 == 0')]
self.assertEqual(len(results), 2)
self.assertEqual(results, [0]*2)
def test01_readIndex(self):
"""Checking reading an Index (string flavor)"""
if verbose:
print '\n', '-=' * 30
print "Running %s.test01_readIndex..." % self.__class__.__name__
# Open the HDF5 file in read-only mode
self.fileh = open_file(self.file, mode="r")
table = self.fileh.root.table
idxcol = table.cols.var1.index
if verbose:
print "Max rows in buf:", table.nrowsinbuf
print "Number of elements per slice:", idxcol.slicesize
print "Chunk size:", idxcol.sorted.chunksize
# Do a selection
results = [p["var1"] for p in table.where('var1 == b"1"')]
self.assertEqual(len(results), 1)
self.assertEqual(results, [b'1'])
def test02_readIndex(self):
"""Checking reading an Index (bool flavor)"""
if verbose:
print '\n', '-=' * 30
print "Running %s.test02_readIndex..." % self.__class__.__name__
# Open the HDF5 file in read-only mode
self.fileh = open_file(self.file, mode="r")
table = self.fileh.root.table
idxcol = table.cols.var2.index
if verbose:
print "Rows in table:", table.nrows
print "Max rows in buf:", table.nrowsinbuf
print "Number of elements per slice:", idxcol.slicesize
print "Chunk size:", idxcol.sorted.chunksize
# Do a selection
results = [p["var2"] for p in table.where('var2 == True')]
if verbose:
print "Selected values:", results
self.assertEqual(len(results), self.nrows // 2)
self.assertEqual(results, [True]*(self.nrows // 2))
def test03_readIndex(self):
"""Checking reading an Index (int flavor)"""
if verbose:
print '\n', '-=' * 30
print "Running %s.test03_readIndex..." % self.__class__.__name__
# Open the HDF5 file in read-only mode
self.fileh = open_file(self.file, mode="r")
table = self.fileh.root.table
idxcol = table.cols.var3.index
if verbose:
print "Max rows in buf:", table.nrowsinbuf
print "Number of elements per slice:", idxcol.slicesize
print "Chunk size:", idxcol.sorted.chunksize
# Do a selection
results = [p["var3"] for p in table.where('(1<var3)&(var3<10)')]
if verbose:
print "Selected values:", results
self.assertEqual(len(results), min(10, table.nrows) - 2)
self.assertEqual(results, range(2, min(10, table.nrows)))
def test04_readIndex(self):
"""Checking reading an Index (float flavor)"""
if verbose:
print '\n', '-=' * 30
print "Running %s.test04_readIndex..." % self.__class__.__name__
# Open the HDF5 file in read-only mode
self.fileh = open_file(self.file, mode="r")
table = self.fileh.root.table
idxcol = table.cols.var4.index
if verbose:
print "Max rows in buf:", table.nrowsinbuf
print "Number of rows in table:", table.nrows
print "Number of elements per slice:", idxcol.slicesize
print "Chunk size:", idxcol.sorted.chunksize
# Do a selection
results = [p["var4"] for p in table.where('var4 < 10')]
# results = [p["var4"] for p in table.where('(1<var4)&(var4<10)')]
if verbose:
print "Selected values:", results
self.assertEqual(len(results), min(10, table.nrows))
self.assertEqual(results, [float(i) for i in
reversed(range(min(10, table.nrows)))])
def test05_getWhereList(self):
"""Checking reading an Index with get_where_list (string flavor)"""
if verbose:
print '\n', '-=' * 30
print "Running %s.test05_getWhereList..." % self.__class__.__name__
# Open the HDF5 file in read-write mode
self.fileh = open_file(self.file, mode="a")
table = self.fileh.root.table
idxcol = table.cols.var4.index
if verbose:
print "Max rows in buf:", table.nrowsinbuf
print "Number of elements per slice:", idxcol.slicesize
print "Chunk size:", idxcol.sorted.chunksize
# Do a selection
table.flavor = "python"
rowList1 = table.get_where_list('var1 < b"10"')
rowList2 = [p.nrow for p in table if p['var1'] < b"10"]
if verbose:
print "Selected values:", rowList1
print "Should look like:", rowList2
self.assertEqual(len(rowList1), len(rowList2))
self.assertEqual(rowList1, rowList2)
def test06_getWhereList(self):
"""Checking reading an Index with get_where_list (bool flavor)"""
if verbose:
print '\n', '-=' * 30
print "Running %s.test06_getWhereList..." % self.__class__.__name__
# Open the HDF5 file in read-write mode
self.fileh = open_file(self.file, mode="a")
table = self.fileh.root.table
idxcol = table.cols.var2.index
if verbose:
print "Max rows in buf:", table.nrowsinbuf
print "Rows in tables:", table.nrows
print "Number of elements per slice:", idxcol.slicesize
print "Chunk size:", idxcol.sorted.chunksize
# Do a selection
table.flavor = "numpy"
rowList1 = table.get_where_list('var2 == False', sort=True)
rowList2 = [p.nrow for p in table if p['var2'] == False]
# Convert to a NumPy object
rowList2 = numpy.array(rowList2, numpy.int64)
if verbose:
print "Selected values:", rowList1
print "Should look like:", rowList2
self.assertEqual(len(rowList1), len(rowList2))
self.assertTrue(allequal(rowList1, rowList2))
def test07_getWhereList(self):
"""Checking reading an Index with get_where_list (int flavor)"""
if verbose:
print '\n', '-=' * 30
print "Running %s.test07_getWhereList..." % self.__class__.__name__
# Open the HDF5 file in read-write mode
self.fileh = open_file(self.file, mode="a")
table = self.fileh.root.table
idxcol = table.cols.var4.index
if verbose:
print "Max rows in buf:", table.nrowsinbuf
print "Number of elements per slice:", idxcol.slicesize
print "Chunk size:", idxcol.sorted.chunksize
# Do a selection
table.flavor = "python"
rowList1 = table.get_where_list('var3 < 15', sort=True)
rowList2 = [p.nrow for p in table if p["var3"] < 15]
if verbose:
print "Selected values:", rowList1
print "Should look like:", rowList2
self.assertEqual(len(rowList1), len(rowList2))
self.assertEqual(rowList1, rowList2)
def test08_getWhereList(self):
"""Checking reading an Index with get_where_list (float flavor)"""
if verbose:
print '\n', '-=' * 30
print "Running %s.test08_getWhereList..." % self.__class__.__name__
# Open the HDF5 file in read-write mode
self.fileh = open_file(self.file, mode="a")
table = self.fileh.root.table
idxcol = table.cols.var4.index
if verbose:
print "Max rows in buf:", table.nrowsinbuf
print "Number of elements per slice:", idxcol.slicesize
print "Chunk size:", idxcol.sorted.chunksize
# Do a selection
table.flavor = "python"
rowList1 = table.get_where_list('var4 < 10', sort=True)
rowList2 = [p.nrow for p in table if p['var4'] < 10]
if verbose:
print "Selected values:", rowList1
print "Should look like:", rowList2
self.assertEqual(len(rowList1), len(rowList2))
self.assertEqual(rowList1, rowList2)
def test09a_removeIndex(self):
"""Checking removing an index"""
if verbose:
print '\n', '-=' * 30
print "Running %s.test09a_removeIndex..." % self.__class__.__name__
# Open the HDF5 file in read-write mode
self.fileh = open_file(self.file, mode="a")
table = self.fileh.root.table
idxcol = table.cols.var1.index
if verbose:
print "Before deletion"
print "var1 column:", table.cols.var1
self.assertEqual(table.colindexed["var1"], 1)
self.assertTrue(idxcol is not None)
# delete the index
table.cols.var1.remove_index()
if verbose:
print "After deletion"
print "var1 column:", table.cols.var1
self.assertTrue(table.cols.var1.index is None)
self.assertEqual(table.colindexed["var1"], 0)
# re-create the index again
indexrows = table.cols.var1.create_index(_blocksizes=small_blocksizes)
self.assertTrue(indexrows is not None)
idxcol = table.cols.var1.index
if verbose:
print "After re-creation"
print "var1 column:", table.cols.var1
self.assertTrue(idxcol is not None)
self.assertEqual(table.colindexed["var1"], 1)
def test09b_removeIndex(self):
"""Checking removing an index (persistent version)"""
if verbose:
print '\n', '-=' * 30
print "Running %s.test09b_removeIndex..." % self.__class__.__name__
# Open the HDF5 file in read-write mode
self.fileh = open_file(self.file, mode="a")
table = self.fileh.root.table
idxcol = table.cols.var1.index
if verbose:
print "Before deletion"
print "var1 index column:", table.cols.var1
self.assertTrue(idxcol is not None)
self.assertEqual(table.colindexed["var1"], 1)
# delete the index
table.cols.var1.remove_index()
# close and reopen the file
self.fileh.close()
self.fileh = open_file(self.file, mode="a")
table = self.fileh.root.table
idxcol = table.cols.var1.index
if verbose:
print "After deletion"
print "var1 column:", table.cols.var1
self.assertTrue(table.cols.var1.index is None)
self.assertEqual(table.colindexed["var1"], 0)
# re-create the index again
indexrows = table.cols.var1.create_index(_blocksizes=small_blocksizes)
self.assertTrue(indexrows is not None)
idxcol = table.cols.var1.index
if verbose:
print "After re-creation"
print "var1 column:", table.cols.var1
self.assertTrue(idxcol is not None)
self.assertEqual(table.colindexed["var1"], 1)
def test10a_moveIndex(self):
"""Checking moving a table with an index"""
if verbose:
print '\n', '-=' * 30
print "Running %s.test10a_moveIndex..." % self.__class__.__name__
# Open the HDF5 file in read-write mode
self.fileh = open_file(self.file, mode="a")
table = self.fileh.root.table
idxcol = table.cols.var1.index
if verbose:
print "Before move"
print "var1 column:", idxcol
self.assertEqual(table.colindexed["var1"], 1)
self.assertTrue(idxcol is not None)
# Create a new group called "agroup"
agroup = self.fileh.create_group("/", "agroup")
# move the table to "agroup"
table.move(agroup, "table2")
if verbose:
print "After move"
print "var1 column:", idxcol
self.assertTrue(table.cols.var1.index is not None)
self.assertEqual(table.colindexed["var1"], 1)
# Some sanity checks
table.flavor = "python"
rowList1 = table.get_where_list('var1 < b"10"')
rowList2 = [p.nrow for p in table if p['var1'] < b"10"]
if verbose:
print "Selected values:", rowList1
print "Should look like:", rowList2
self.assertEqual(len(rowList1), len(rowList2))
self.assertEqual(rowList1, rowList2)
def test10b_moveIndex(self):
"""Checking moving a table with an index (persistent version)"""
if verbose:
print '\n', '-=' * 30
print "Running %s.test10b_moveIndex..." % self.__class__.__name__
# Open the HDF5 file in read-write mode
self.fileh = open_file(self.file, mode="a")
table = self.fileh.root.table
idxcol = table.cols.var1.index
if verbose:
print "Before move"
print "var1 index column:", idxcol
self.assertTrue(idxcol is not None)
self.assertEqual(table.colindexed["var1"], 1)
# Create a new group called "agroup"
agroup = self.fileh.create_group("/", "agroup")
# move the table to "agroup"
table.move(agroup, "table2")
# close and reopen the file
self.fileh.close()
self.fileh = open_file(self.file, mode="a")
table = self.fileh.root.agroup.table2
idxcol = table.cols.var1.index
if verbose:
print "After move"
print "var1 column:", idxcol
self.assertTrue(table.cols.var1.index is not None)
self.assertEqual(table.colindexed["var1"], 1)
# Some sanity checks
table.flavor = "python"
rowList1 = table.get_where_list('var1 < b"10"')
rowList2 = [p.nrow for p in table if p['var1'] < b"10"]
if verbose:
print "Selected values:", rowList1, type(rowList1)
print "Should look like:", rowList2, type(rowList2)
self.assertEqual(len(rowList1), len(rowList2))
self.assertEqual(rowList1, rowList2)
def test10c_moveIndex(self):
"""Checking moving a table with an index (small node cache)."""
if verbose:
print '\n', '-=' * 30
print "Running %s.test10c_moveIndex..." % self.__class__.__name__
# Open the HDF5 file in read-write mode
self.fileh = open_file(self.file, mode="a", node_cache_slots=10)
table = self.fileh.root.table
idxcol = table.cols.var1.index
if verbose:
print "Before move"
print "var1 column:", idxcol
self.assertEqual(table.colindexed["var1"], 1)
self.assertTrue(idxcol is not None)
# Create a new group called "agroup"
agroup = self.fileh.create_group("/", "agroup")
# move the table to "agroup"
table.move(agroup, "table2")
if verbose:
print "After move"
print "var1 column:", idxcol
self.assertTrue(table.cols.var1.index is not None)
self.assertEqual(table.colindexed["var1"], 1)
# Some sanity checks
table.flavor = "python"
rowList1 = table.get_where_list('var1 < b"10"')
rowList2 = [p.nrow for p in table if p['var1'] < b"10"]
if verbose:
print "Selected values:", rowList1
print "Should look like:", rowList2
self.assertEqual(len(rowList1), len(rowList2))
self.assertEqual(rowList1, rowList2)
def test10d_moveIndex(self):
"""Checking moving a table with an index (no node cache)."""
if verbose:
print '\n', '-=' * 30
print "Running %s.test10d_moveIndex..." % self.__class__.__name__
# Open the HDF5 file in read-write mode
self.fileh = open_file(self.file, mode="a", node_cache_slots=0)
table = self.fileh.root.table
idxcol = table.cols.var1.index
if verbose:
print "Before move"
print "var1 column:", idxcol
self.assertEqual(table.colindexed["var1"], 1)
self.assertTrue(idxcol is not None)
# Create a new group called "agroup"
agroup = self.fileh.create_group("/", "agroup")
# move the table to "agroup"
table.move(agroup, "table2")
if verbose:
print "After move"
print "var1 column:", idxcol
self.assertTrue(table.cols.var1.index is not None)
self.assertEqual(table.colindexed["var1"], 1)
# Some sanity checks
table.flavor = "python"
rowList1 = table.get_where_list('var1 < b"10"')
rowList2 = [p.nrow for p in table if p['var1'] < b"10"]
if verbose:
print "Selected values:", rowList1
print "Should look like:", rowList2
self.assertEqual(len(rowList1), len(rowList2))
self.assertEqual(rowList1, rowList2)
def test11a_removeTableWithIndex(self):
"""Checking removing a table with indexes"""
if verbose:
print '\n', '-=' * 30
print "Running %s.test11a_removeTableWithIndex..." % self.__class__.__name__
# Open the HDF5 file in read-write mode
self.fileh = open_file(self.file, mode="a")
table = self.fileh.root.table
idxcol = table.cols.var1.index
if verbose:
print "Before deletion"
print "var1 column:", table.cols.var1
self.assertEqual(table.colindexed["var1"], 1)
self.assertTrue(idxcol is not None)
# delete the table
self.fileh.remove_node("/table")
if verbose:
print "After deletion"
self.assertTrue("table" not in self.fileh.root)
# re-create the table and the index again
table = self.fileh.create_table("/", 'table', TDescr, "New table",
self.filters, self.nrows)
for i in range(self.nrows):
table.row['var1'] = str(i)
table.row['var2'] = i % 2
table.row['var3'] = i
table.row['var4'] = float(self.nrows - i - 1)
table.row.append()
table.flush()
# Index all entries:
for col in table.colinstances.itervalues():
indexrows = col.create_index(_blocksizes=small_blocksizes)
self.assertTrue(indexrows is not None)
idxcol = table.cols.var1.index
if verbose:
print "After re-creation"
print "var1 column:", table.cols.var1
self.assertTrue(idxcol is not None)
self.assertEqual(table.colindexed["var1"], 1)
def test11b_removeTableWithIndex(self):
"""Checking removing a table with indexes (persistent version 2)"""
if verbose:
print '\n', '-=' * 30
print "Running %s.test11b_removeTableWithIndex..." % self.__class__.__name__
self.fileh = open_file(self.file, mode="a")
table = self.fileh.root.table
idxcol = table.cols.var1.index
if verbose:
print "Before deletion"
print "var1 column:", table.cols.var1
self.assertEqual(table.colindexed["var1"], 1)
self.assertTrue(idxcol is not None)
# delete the table
self.fileh.remove_node("/table")
if verbose:
print "After deletion"
self.assertTrue("table" not in self.fileh.root)
# close and reopen the file
self.fileh.close()
self.fileh = open_file(self.file, mode="r+")
# re-create the table and the index again
table = self.fileh.create_table("/", 'table', TDescr, "New table",
self.filters, self.nrows)
for i in range(self.nrows):
table.row['var1'] = str(i)
table.row['var2'] = i % 2
table.row['var3'] = i
table.row['var4'] = float(self.nrows - i - 1)
table.row.append()
table.flush()
# Index all entries:
for col in table.colinstances.itervalues():
indexrows = col.create_index(_blocksizes=small_blocksizes)
self.assertTrue(indexrows is not None)
idxcol = table.cols.var1.index
if verbose:
print "After re-creation"
print "var1 column:", table.cols.var1
self.assertTrue(idxcol is not None)
self.assertEqual(table.colindexed["var1"], 1)
# Test provided by Andrew Straw
def test11c_removeTableWithIndex(self):
    """Checking removing a table with indexes (persistent version 3)"""

    if verbose:
        print '\n', '-=' * 30
        print "Running %s.test11c_removeTableWithIndex..." % self.__class__.__name__

    # A minimal two-column description just for this test.
    class Distance(IsDescription):
        frame = Int32Col(pos=0)
        distance = FloatCol(pos=1)

    # Delete the old temporal file
    os.remove(self.file)
    self.file = tempfile.mktemp(".h5")
    self.fileh = open_file(self.file, mode='w')
    table = self.fileh.create_table(
        self.fileh.root, 'distance_table', Distance)
    table.cols.frame.create_index(_blocksizes=small_blocksizes)
    r = table.row
    for i in range(10):
        r['frame'] = i
        r['distance'] = float(i**2)
        r.append()
    table.flush()
    self.fileh.close()
    # Reopen and remove the indexed table; this must not raise
    # (regression for removing tables that carry indexes).
    self.fileh = open_file(self.file, mode='r+')
    self.fileh.remove_node(self.fileh.root.distance_table)
# Slice size (third entry of the blocksizes tuple); drives the row counts
# used by the *ReadTestCase classes below.
small_ss = small_blocksizes[2]
class BasicReadTestCase(BasicTestCase):
    """Read checks with no compression, shuffle or fletcher32."""
    nrows = small_ss
    compress = 0
    complib = "zlib"
    shuffle = 0
    fletcher32 = 0
class ZlibReadTestCase(BasicTestCase):
    """Read checks with zlib compression enabled."""
    nrows = small_ss
    compress = 1
    complib = "zlib"
    shuffle = 0
    fletcher32 = 0
class BloscReadTestCase(BasicTestCase):
    """Read checks with blosc compression enabled."""
    nrows = small_ss
    compress = 1
    complib = "blosc"
    shuffle = 0
    fletcher32 = 0
class LZOReadTestCase(BasicTestCase):
    """Read checks with LZO compression enabled."""
    nrows = small_ss
    compress = 1
    complib = "lzo"
    shuffle = 0
    fletcher32 = 0
class Bzip2ReadTestCase(BasicTestCase):
    """Read checks with bzip2 compression enabled."""
    nrows = small_ss
    compress = 1
    complib = "bzip2"
    shuffle = 0
    fletcher32 = 0
class ShuffleReadTestCase(BasicTestCase):
    """Read checks with zlib compression plus the shuffle filter."""
    nrows = small_ss
    compress = 1
    complib = "zlib"
    shuffle = 1
    fletcher32 = 0
class Fletcher32ReadTestCase(BasicTestCase):
    """Read checks with zlib compression plus fletcher32 checksums."""
    nrows = small_ss
    compress = 1
    complib = "zlib"
    shuffle = 0
    fletcher32 = 1
class ShuffleFletcher32ReadTestCase(BasicTestCase):
    """Read checks with zlib compression, shuffle and fletcher32 together."""
    nrows = small_ss
    compress = 1
    complib = "zlib"
    shuffle = 1
    fletcher32 = 1
class OneHalfTestCase(BasicTestCase):
    # One and a half slices worth of rows.
    nrows = small_ss + small_ss//2
class UpperBoundTestCase(BasicTestCase):
    # Just one row past a slice boundary.
    nrows = small_ss + 1
class LowerBoundTestCase(BasicTestCase):
    # One row short of two complete slices.
    nrows = small_ss * 2-1
class DeepTableIndexTestCase(unittest.TestCase):
    """Check that table columns can be indexed regardless of how deep
    the table hangs in the object tree.

    Every test creates a fresh HDF5 file, builds a default-filled table
    at some depth, indexes the 'var1' column and runs the same sanity
    checks.  The original five tests were near-verbatim copies of each
    other; the shared machinery now lives in two private helpers.
    """

    nrows = minRowIndex

    def _create_indexed_table(self, group):
        """Create a default-filled table under `group` and index 'var1'.

        Returns the new table, with the index-creation result asserted
        to be non-None.
        """
        title = "This is the IndexArray title"
        table = self.fileh.create_table(group, 'table', TDescr, title,
                                        None, self.nrows)
        for i in range(self.nrows):
            # Fill rows with defaults
            table.row.append()
        table.flush()
        # Index some column
        indexrows = table.cols.var1.create_index()
        self.assertTrue(indexrows is not None)
        return table

    def _check_index_and_cleanup(self, table):
        """Run sanity checks on `table`'s 'var1' index, then close and
        remove the temporary file."""
        idxcol = table.cols.var1.index
        self.assertEqual(table.colindexed["var1"], 1)
        self.assertTrue(idxcol is not None)
        self.assertEqual(idxcol.nelements, self.nrows)
        self.fileh.close()
        os.remove(self.file)

    def test01(self):
        "Checking the indexing of a table in a 2nd level hierarchy"
        self.file = tempfile.mktemp(".h5")
        self.fileh = open_file(self.file, "w")
        group = self.fileh.create_group(self.fileh.root, "agroup")
        table = self._create_indexed_table(group)
        self._check_index_and_cleanup(table)

    def test01b(self):
        "Checking the indexing of a table in 2nd level (persistent version)"
        self.file = tempfile.mktemp(".h5")
        self.fileh = open_file(self.file, "w")
        group = self.fileh.create_group(self.fileh.root, "agroup")
        self._create_indexed_table(group)
        # Close and re-open the file so the checks exercise the
        # persisted (on-disk) index rather than the in-memory one.
        self.fileh.close()
        self.fileh = open_file(self.file, "a")
        self._check_index_and_cleanup(self.fileh.root.agroup.table)

    def test02(self):
        "Checking the indexing of a table in a 4th level hierarchy"
        self.file = tempfile.mktemp(".h5")
        self.fileh = open_file(self.file, "w")
        group = self.fileh.root
        for i in range(3):
            group = self.fileh.create_group(group, "agroup")
        table = self._create_indexed_table(group)
        self._check_index_and_cleanup(table)

    def test02b(self):
        "Checking the indexing of a table in a 4th level (persistent version)"
        self.file = tempfile.mktemp(".h5")
        self.fileh = open_file(self.file, "w")
        group = self.fileh.root
        for i in range(3):
            group = self.fileh.create_group(group, "agroup")
        self._create_indexed_table(group)
        # Close and re-open the file so the checks exercise the
        # persisted (on-disk) index rather than the in-memory one.
        self.fileh.close()
        self.fileh = open_file(self.file, "a")
        self._check_index_and_cleanup(self.fileh.root.agroup.agroup.agroup.table)

    def test03(self):
        "Checking the indexing of a table in a 100th level hierarchy"
        self.file = tempfile.mktemp(".h5")
        self.fileh = open_file(self.file, "w")
        group = self.fileh.root
        for i in range(100):
            group = self.fileh.create_group(group, "agroup")
        table = self._create_indexed_table(group)
        self._check_index_and_cleanup(table)
class IndexProps(object):
    """Lightweight bundle of indexing policy: the automatic-indexing
    flag plus the Filters instance used for new indexes."""

    def __init__(self, auto=default_auto_index, filters=default_index_filters):
        self.auto, self.filters = auto, filters
# Pre-built property bundles consumed by the AutomaticIndexingTestCase
# subclasses below: library defaults, auto-indexing disabled, and
# non-default index filters.
DefaultProps = IndexProps()
NoAutoProps = IndexProps(auto=False)
ChangeFiltersProps = IndexProps(
    filters=Filters(complevel=6, complib="zlib",
                    shuffle=False, fletcher32=False))
class AutomaticIndexingTestCase(unittest.TestCase):
    # Base class for the AI*TestCase variants below.  Subclasses override
    # these knobs and must also provide an `nrows` attribute.
    reopen = 1
    iprops = NoAutoProps
    colsToIndex = ['var1', 'var2', 'var3']
    small_blocksizes = (16, 8, 4, 2)

    def setUp(self):
        # Create an instance of an HDF5 Table
        self.file = tempfile.mktemp(".h5")
        self.fileh = open_file(self.file, "w")
        # Create a table
        title = "This is the IndexArray title"
        root = self.fileh.root
        # Make the chunkshape smaller or equal than small_blocksizes[-1]
        chunkshape = (2,)
        self.table = self.fileh.create_table(root, 'table', TDescr, title,
                                             None, self.nrows,
                                             chunkshape=chunkshape)
        self.table.autoindex = self.iprops.auto
        for colname in self.colsToIndex:
            self.table.colinstances[colname].create_index(
                _blocksizes=self.small_blocksizes)
        for i in range(self.nrows):
            # Fill rows with defaults
            self.table.row.append()
        self.table.flush()
        if self.reopen:
            self.fileh.close()
            self.fileh = open_file(self.file, "a")
            self.table = self.fileh.root.table

    def tearDown(self):
        self.fileh.close()
        os.remove(self.file)
        cleanup(self)

    def test01_attrs(self):
        "Checking indexing attributes (part1)"
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test01_attrs..." % self.__class__.__name__
        table = self.table
        if self.iprops is DefaultProps:
            self.assertEqual(table.indexed, 0)
        else:
            self.assertEqual(table.indexed, 1)
        if self.iprops is DefaultProps:
            # With default props no column should be indexed.
            self.assertEqual(table.colindexed["var1"], 0)
            self.assertTrue(table.cols.var1.index is None)
            self.assertEqual(table.colindexed["var2"], 0)
            self.assertTrue(table.cols.var2.index is None)
            self.assertEqual(table.colindexed["var3"], 0)
            self.assertTrue(table.cols.var3.index is None)
            self.assertEqual(table.colindexed["var4"], 0)
            self.assertTrue(table.cols.var4.index is None)
        else:
            # Check that the var1, var2 and var3 (and only these)
            # has been indexed
            self.assertEqual(table.colindexed["var1"], 1)
            self.assertTrue(table.cols.var1.index is not None)
            self.assertEqual(table.colindexed["var2"], 1)
            self.assertTrue(table.cols.var2.index is not None)
            self.assertEqual(table.colindexed["var3"], 1)
            self.assertTrue(table.cols.var3.index is not None)
            self.assertEqual(table.colindexed["var4"], 0)
            self.assertTrue(table.cols.var4.index is None)

    def test02_attrs(self):
        "Checking indexing attributes (part2)"
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test02_attrs..." % self.__class__.__name__
        table = self.table
        # Check the policy parameters
        if verbose:
            if table.indexed:
                print "index props:", table.autoindex
            else:
                print "Table is not indexed"
        # Check non-default values for index saving policy
        if self.iprops is NoAutoProps:
            self.assertFalse(table.autoindex)
        elif self.iprops is ChangeFiltersProps:
            self.assertTrue(table.autoindex)
        # Check Index() objects exists and are properly placed
        if self.iprops is DefaultProps:
            self.assertEqual(table.cols.var1.index, None)
            self.assertEqual(table.cols.var2.index, None)
            self.assertEqual(table.cols.var3.index, None)
            self.assertEqual(table.cols.var4.index, None)
        else:
            self.assertTrue(isinstance(table.cols.var1.index, Index))
            self.assertTrue(isinstance(table.cols.var2.index, Index))
            self.assertTrue(isinstance(table.cols.var3.index, Index))
            self.assertEqual(table.cols.var4.index, None)

    def test03_counters(self):
        "Checking indexing counters"
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test03_counters..." % self.__class__.__name__
        table = self.table
        # Check the counters for indexes
        if verbose:
            if table.indexed:
                print "indexedrows:", table._indexedrows
                print "unsavedindexedrows:", table._unsaved_indexedrows
                index = table.cols.var1.index
                print "table rows:", table.nrows
                print "computed indexed rows:", index.nrows * index.slicesize
            else:
                print "Table is not indexed"
        if self.iprops is not DefaultProps:
            index = table.cols.var1.index
            indexedrows = index.nelements
            self.assertEqual(table._indexedrows, indexedrows)
            # NOTE(review): the next assignment is a duplicate of the one
            # above; harmless, but probably unintended.
            indexedrows = index.nelements
            self.assertEqual(table._unsaved_indexedrows,
                             self.nrows - indexedrows)

    def test04_noauto(self):
        "Checking indexing counters (non-automatic mode)"
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test04_noauto..." % self.__class__.__name__
        table = self.table
        # Force a sync in indexes
        table.flush_rows_to_index()
        # Check the counters for indexes
        if verbose:
            if table.indexed:
                print "indexedrows:", table._indexedrows
                print "unsavedindexedrows:", table._unsaved_indexedrows
                index = table.cols.var1.index
                print "computed indexed rows:", index.nelements
            else:
                print "Table is not indexed"
        # No unindexated rows should remain
        index = table.cols.var1.index
        if self.iprops is DefaultProps:
            self.assertTrue(index is None)
        else:
            indexedrows = index.nelements
            self.assertEqual(table._indexedrows, index.nelements)
            self.assertEqual(table._unsaved_indexedrows,
                             self.nrows - indexedrows)
        # Check non-default values for index saving policy
        if self.iprops is NoAutoProps:
            self.assertFalse(table.autoindex)
        elif self.iprops is ChangeFiltersProps:
            self.assertTrue(table.autoindex)

    def test05_icounters(self):
        "Checking indexing counters (remove_rows)"
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test05_icounters..." % self.__class__.__name__
        table = self.table
        # Force a sync in indexes
        table.flush_rows_to_index()
        # Non indexated rows should remain here
        if self.iprops is not DefaultProps:
            indexedrows = table._indexedrows
            unsavedindexedrows = table._unsaved_indexedrows
        # Now, remove some rows:
        table.remove_rows(2, 4)
        if self.reopen:
            self.fileh.close()
            self.fileh = open_file(self.file, "a")
            table = self.fileh.root.table
        # Check the counters for indexes
        if verbose:
            if table.indexed:
                print "indexedrows:", table._indexedrows
                print "original indexedrows:", indexedrows
                print "unsavedindexedrows:", table._unsaved_indexedrows
                print "original unsavedindexedrows:", unsavedindexedrows
                # index = table.cols.var1.index
                print "index dirty:", table.cols.var1.index.dirty
            else:
                print "Table is not indexed"
        # Check the counters
        self.assertEqual(table.nrows, self.nrows - 2)
        if self.iprops is NoAutoProps:
            # Removing rows must mark non-auto indexes dirty.
            self.assertTrue(table.cols.var1.index.dirty)
        # Check non-default values for index saving policy
        if self.iprops is NoAutoProps:
            self.assertFalse(table.autoindex)
        elif self.iprops is ChangeFiltersProps:
            self.assertTrue(table.autoindex)

    def test06_dirty(self):
        "Checking dirty flags (remove_rows action)"
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test06_dirty..." % self.__class__.__name__
        table = self.table
        # Force a sync in indexes
        table.flush_rows_to_index()
        # Now, remove some rows:
        table.remove_rows(3, 5)
        if self.reopen:
            self.fileh.close()
            self.fileh = open_file(self.file, "a")
            table = self.fileh.root.table
        # Check the dirty flag for indexes
        if verbose:
            print "auto flag:", table.autoindex
            for colname in table.colnames:
                if table.cols._f_col(colname).index:
                    print "dirty flag col %s: %s" % \
                        (colname, table.cols._f_col(colname).index.dirty)
        # Check the flags
        for colname in table.colnames:
            if table.cols._f_col(colname).index:
                if not table.autoindex:
                    self.assertEqual(table.cols._f_col(colname).index.dirty,
                                     True)
                else:
                    self.assertEqual(table.cols._f_col(colname).index.dirty,
                                     False)

    def test07_noauto(self):
        "Checking indexing counters (modify_rows, no-auto mode)"
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test07_noauto..." % self.__class__.__name__
        table = self.table
        # Force a sync in indexes
        table.flush_rows_to_index()
        # No unindexated rows should remain here
        if self.iprops is not DefaultProps:
            indexedrows = table._indexedrows
            unsavedindexedrows = table._unsaved_indexedrows
        # Now, modify just one row:
        table.modify_rows(3, None, 1, [["asa", 0, 3, 3.1]])
        if self.reopen:
            self.fileh.close()
            self.fileh = open_file(self.file, "a")
            table = self.fileh.root.table
        # Check the counters for indexes
        if verbose:
            if table.indexed:
                print "indexedrows:", table._indexedrows
                print "original indexedrows:", indexedrows
                print "unsavedindexedrows:", table._unsaved_indexedrows
                print "original unsavedindexedrows:", unsavedindexedrows
                index = table.cols.var1.index
                print "computed indexed rows:", index.nelements
            else:
                print "Table is not indexed"
        # Check the counters
        self.assertEqual(table.nrows, self.nrows)
        if self.iprops is NoAutoProps:
            # Modifying rows must mark non-auto indexes dirty.
            self.assertTrue(table.cols.var1.index.dirty)
        # Check the dirty flag for indexes
        if verbose:
            for colname in table.colnames:
                if table.cols._f_col(colname).index:
                    print "dirty flag col %s: %s" % \
                        (colname, table.cols._f_col(colname).index.dirty)
        for colname in table.colnames:
            if table.cols._f_col(colname).index:
                if not table.autoindex:
                    self.assertEqual(table.cols._f_col(colname).index.dirty,
                                     True)
                else:
                    self.assertEqual(table.cols._f_col(colname).index.dirty,
                                     False)

    def test07b_noauto(self):
        "Checking indexing queries (modify in iterator, no-auto mode)"
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test07b_noauto..." % self.__class__.__name__
        table = self.table
        # Force a sync in indexes
        table.flush_rows_to_index()
        # Do a query that uses indexes
        res = [row.nrow for row in table.where('(var2 == True) & (var3 > 0)')]
        # Now, modify just one row:
        for row in table:
            if row.nrow == 3:
                row['var1'] = "asa"
                row['var2'] = True
                row['var3'] = 3
                row['var4'] = 3.1
                row.update()
        table.flush()
        if self.reopen:
            self.fileh.close()
            self.fileh = open_file(self.file, "a")
            table = self.fileh.root.table
        # Do a query that uses indexes
        resq = [row.nrow for row in table.where('(var2 == True) & (var3 > 0)')]
        # The updated row (nrow 3) must now match the query too.
        res_ = res + [3]
        if verbose:
            print "AutoIndex?:", table.autoindex
            print "Query results (original):", res
            print "Query results (after modifying table):", resq
            print "Should look like:", res_
        self.assertEqual(res_, resq)

    def test07c_noauto(self):
        "Checking indexing queries (append, no-auto mode)"
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test07c_noauto..." % self.__class__.__name__
        table = self.table
        # Force a sync in indexes
        table.flush_rows_to_index()
        # Do a query that uses indexes
        res = [row.nrow for row in table.where('(var2 == True) & (var3 > 0)')]
        # Now, append three rows
        table.append([("asa", True, 1, 3.1)])
        table.append([("asb", True, 2, 3.1)])
        table.append([("asc", True, 3, 3.1)])
        table.flush()
        if self.reopen:
            self.fileh.close()
            self.fileh = open_file(self.file, "a")
            table = self.fileh.root.table
        # Do a query that uses indexes
        resq = [row.nrow for row in table.where('(var2 == True) & (var3 > 0)')]
        # The three appended rows must also match the query.
        res_ = res + [table.nrows-3, table.nrows-2, table.nrows-1]
        if verbose:
            print "AutoIndex?:", table.autoindex
            print "Query results (original):", res
            print "Query results (after modifying table):", resq
            print "Should look like:", res_
        self.assertEqual(res_, resq)

    def test08_dirty(self):
        "Checking dirty flags (modify_columns)"
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test08_dirty..." % self.__class__.__name__
        table = self.table
        # Force a sync in indexes
        table.flush_rows_to_index()
        # Non indexated rows should remain here
        if self.iprops is not DefaultProps:
            indexedrows = table._indexedrows
            self.assertTrue(indexedrows is not None)
            unsavedindexedrows = table._unsaved_indexedrows
            self.assertTrue(unsavedindexedrows is not None)
        # Now, modify a couple of rows:
        table.modify_columns(1, columns=[["asa", "asb"], [1., 2.]],
                             names=["var1", "var4"])
        if self.reopen:
            self.fileh.close()
            self.fileh = open_file(self.file, "a")
            table = self.fileh.root.table
        # Check the counters
        self.assertEqual(table.nrows, self.nrows)
        if self.iprops is NoAutoProps:
            self.assertTrue(table.cols.var1.index.dirty)
        # Check the dirty flag for indexes
        if verbose:
            for colname in table.colnames:
                if table.cols._f_col(colname).index:
                    print "dirty flag col %s: %s" % \
                        (colname, table.cols._f_col(colname).index.dirty)
        for colname in table.colnames:
            if table.cols._f_col(colname).index:
                if not table.autoindex:
                    # Only the modified *indexed* column (var1) goes dirty;
                    # var4 has no index.
                    if colname in ["var1"]:
                        self.assertEqual(
                            table.cols._f_col(colname).index.dirty, True)
                    else:
                        self.assertEqual(
                            table.cols._f_col(colname).index.dirty, False)
                else:
                    self.assertEqual(table.cols._f_col(colname).index.dirty,
                                     False)

    def test09a_propIndex(self):
        "Checking propagate Index feature in Table.copy() (attrs)"
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test09a_propIndex..." % self.__class__.__name__
        table = self.table
        # Don't force a sync in indexes
        # table.flush_rows_to_index()
        # Non indexated rows should remain here
        if self.iprops is not DefaultProps:
            indexedrows = table._indexedrows
            self.assertTrue(indexedrows is not None)
            unsavedindexedrows = table._unsaved_indexedrows
            self.assertTrue(unsavedindexedrows is not None)
        # Now, remove some rows to make columns dirty
        # table.remove_rows(3,5)
        # Copy a Table to another location
        table2 = table.copy("/", 'table2', propindexes=True)
        if self.reopen:
            self.fileh.close()
            self.fileh = open_file(self.file, "a")
            table = self.fileh.root.table
            table2 = self.fileh.root.table2
        index1 = table.cols.var1.index
        index2 = table2.cols.var1.index
        if verbose:
            print "Copied index:", index2
            print "Original index:", index1
            if index1:
                print "Elements in copied index:", index2.nelements
                print "Elements in original index:", index1.nelements
        # Check the counters
        self.assertEqual(table.nrows, table2.nrows)
        if table.indexed:
            self.assertTrue(table2.indexed)
        if self.iprops is DefaultProps:
            # No index: the index should not exist
            self.assertTrue(index1 is None)
            self.assertTrue(index2 is None)
        elif self.iprops is NoAutoProps:
            self.assertTrue(index2 is not None)
            # Check the dirty flag for indexes
            if verbose:
                for colname in table2.colnames:
                    if table2.cols._f_col(colname).index:
                        print "dirty flag col %s: %s" % \
                            (colname, table2.cols._f_col(colname).index.dirty)
            for colname in table2.colnames:
                if table2.cols._f_col(colname).index:
                    self.assertEqual(table2.cols._f_col(colname).index.dirty,
                                     False)

    def test09b_propIndex(self):
        "Checking that propindexes=False works"
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test09b_propIndex..." % self.__class__.__name__
        table = self.table
        # Don't force a sync in indexes
        # table.flush_rows_to_index()
        # Non indexated rows should remain here
        if self.iprops is not DefaultProps:
            indexedrows = table._indexedrows
            self.assertTrue(indexedrows is not None)
            unsavedindexedrows = table._unsaved_indexedrows
            self.assertTrue(unsavedindexedrows is not None)
        # Now, remove some rows to make columns dirty
        # table.remove_rows(3,5)
        # Copy a Table to another location
        table2 = table.copy("/", 'table2', propindexes=False)
        if self.reopen:
            self.fileh.close()
            self.fileh = open_file(self.file, "a")
            table = self.fileh.root.table
            table2 = self.fileh.root.table2
        if verbose:
            print "autoindex?:", self.iprops.auto
            print "Copied index indexed?:", table2.cols.var1.is_indexed
            print "Original index indexed?:", table.cols.var1.is_indexed
        if self.iprops is DefaultProps:
            # No index: the index should not exist
            self.assertFalse(table2.cols.var1.is_indexed)
            self.assertFalse(table.cols.var1.is_indexed)
        elif self.iprops is NoAutoProps:
            # With propindexes=False the copy must NOT carry the index.
            self.assertFalse(table2.cols.var1.is_indexed)
            self.assertTrue(table.cols.var1.is_indexed)

    def test10_propIndex(self):
        "Checking propagate Index feature in Table.copy() (values)"
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test10_propIndex..." % self.__class__.__name__
        table = self.table
        # Don't force a sync in indexes
        # table.flush_rows_to_index()
        # Non indexated rows should remain here
        if self.iprops is not DefaultProps:
            indexedrows = table._indexedrows
            self.assertTrue(indexedrows is not None)
            unsavedindexedrows = table._unsaved_indexedrows
            self.assertTrue(unsavedindexedrows is not None)
        # Now, remove some rows to make columns dirty
        # table.remove_rows(3,5)
        # Copy a Table to another location
        table2 = table.copy("/", 'table2', propindexes=True)
        if self.reopen:
            self.fileh.close()
            self.fileh = open_file(self.file, "a")
            table = self.fileh.root.table
            table2 = self.fileh.root.table2
        index1 = table.cols.var3.index
        index2 = table2.cols.var3.index
        # NOTE(review): beyond the setUp assertions above, this test only
        # prints the index contents; it asserts nothing about the copied
        # index values despite its title.
        if verbose:
            print "Copied index:", index2
            print "Original index:", index1
            if index1:
                print "Elements in copied index:", index2.nelements
                print "Elements in original index:", index1.nelements

    def test11_propIndex(self):
        "Checking propagate Index feature in Table.copy() (dirty flags)"
        if verbose:
            print '\n', '-=' * 30
            print "Running %s.test11_propIndex..." % self.__class__.__name__
        table = self.table
        # Force a sync in indexes
        table.flush_rows_to_index()
        # Non indexated rows should remain here
        if self.iprops is not DefaultProps:
            indexedrows = table._indexedrows
            self.assertTrue(indexedrows is not None)
            unsavedindexedrows = table._unsaved_indexedrows
            self.assertTrue(unsavedindexedrows is not None)
        # Now, modify an indexed column and an unindexed one
        # to make the "var1" dirty
        table.modify_columns(1, columns=[["asa", "asb"], [1., 2.]],
                             names=["var1", "var4"])
        # Copy a Table to another location
        table2 = table.copy("/", 'table2', propindexes=True)
        if self.reopen:
            self.fileh.close()
            self.fileh = open_file(self.file, "a")
            table = self.fileh.root.table
            table2 = self.fileh.root.table2
        index1 = table.cols.var1.index
        index2 = table2.cols.var1.index
        if verbose:
            print "Copied index:", index2
            print "Original index:", index1
            if index1:
                print "Elements in copied index:", index2.nelements
                print "Elements in original index:", index1.nelements
        # Check the dirty flag for indexes
        if verbose:
            for colname in table2.colnames:
                if table2.cols._f_col(colname).index:
                    print "dirty flag col %s: %s" % \
                        (colname, table2.cols._f_col(colname).index.dirty)
        for colname in table2.colnames:
            if table2.cols._f_col(colname).index:
                if table2.autoindex:
                    # All the destination columns should be non-dirty because
                    # the copy removes the dirty state and puts the
                    # index in a sane state
                    self.assertEqual(table2.cols._f_col(colname).index.dirty,
                                     False)
# minRowIndex = 10000 # just if one wants more indexed rows to be checked
class AI1TestCase(AutomaticIndexingTestCase):
    """Non-auto indexing, small table, no file reopen."""
    nrows = 102
    reopen = 0
    iprops = NoAutoProps
    colsToIndex = ['var1', 'var2', 'var3']
class AI2TestCase(AutomaticIndexingTestCase):
    """Non-auto indexing, small table, with file reopen."""
    nrows = 102
    reopen = 1
    iprops = NoAutoProps
    colsToIndex = ['var1', 'var2', 'var3']
class AI4bTestCase(AutomaticIndexingTestCase):
    """Non-auto indexing, slightly larger table, with file reopen."""
    nrows = 112
    reopen = 1
    iprops = NoAutoProps
    colsToIndex = ['var1', 'var2', 'var3']
class AI5TestCase(AutomaticIndexingTestCase):
    """Non-auto indexing; row count one short of 11 slices."""
    sbs, bs, ss, cs = calc_chunksize(minRowIndex, memlevel=1)
    nrows = ss * 11 - 1
    reopen = 0
    iprops = NoAutoProps
    colsToIndex = ['var1', 'var2', 'var3']
class AI6TestCase(AutomaticIndexingTestCase):
    """Non-auto indexing; row count one past 21 slices, with reopen."""
    sbs, bs, ss, cs = calc_chunksize(minRowIndex, memlevel=1)
    nrows = ss * 21 + 1
    reopen = 1
    iprops = NoAutoProps
    colsToIndex = ['var1', 'var2', 'var3']
class AI7TestCase(AutomaticIndexingTestCase):
    """Non-auto indexing; row count one short of 12 slices."""
    sbs, bs, ss, cs = calc_chunksize(minRowIndex, memlevel=1)
    nrows = ss * 12 - 1
    reopen = 0
    iprops = NoAutoProps
    colsToIndex = ['var1', 'var2', 'var3']
class AI8TestCase(AutomaticIndexingTestCase):
    """Non-auto indexing; 15 slices plus 100 rows, with reopen."""
    sbs, bs, ss, cs = calc_chunksize(minRowIndex, memlevel=1)
    nrows = ss * 15 + 100
    reopen = 1
    iprops = NoAutoProps
    colsToIndex = ['var1', 'var2', 'var3']
class AI9TestCase(AutomaticIndexingTestCase):
    """Default props (no indexing at all); exactly one slice of rows."""
    sbs, bs, ss, cs = calc_chunksize(minRowIndex, memlevel=1)
    nrows = ss
    reopen = 0
    iprops = DefaultProps
    colsToIndex = []
class AI10TestCase(AutomaticIndexingTestCase):
    """Default props (no indexing at all), with file reopen."""
    nrows = 102
    reopen = 1
    iprops = DefaultProps
    colsToIndex = []
class AI11TestCase(AutomaticIndexingTestCase):
    """Non-default index filters, no file reopen."""
    nrows = 102
    reopen = 0
    iprops = ChangeFiltersProps
    colsToIndex = ['var1', 'var2', 'var3']
class AI12TestCase(AutomaticIndexingTestCase):
    # NOTE(review): this class is an exact duplicate of AI11TestCase.
    # Judging by the other 0/1 reopen pairs (AI1/AI2, AI9/AI10), it was
    # probably meant to use `reopen = 1` — confirm before changing.
    # nrows = 10002
    nrows = 102
    reopen = 0
    iprops = ChangeFiltersProps
    colsToIndex = ['var1', 'var2', 'var3']
class ManyNodesTestCase(PyTablesTestCase):
    """Regression test: index many nodes in one session (bug #26)."""

    def setUp(self):
        self.file = tempfile.mktemp(".h5")
        self.fileh = open_file(self.file, "w", node_cache_slots=64)

    def test00(self):
        """Indexing many nodes in one single session (based on bug #26)"""
        IdxRecord = {
            'f0': Int8Col(),
            'f1': Int8Col(),
            'f2': Int8Col(),
        }
        h5 = self.fileh
        for qn in range(5):
            # The group path only depends on the outer counter, so it is
            # computed once per outer iteration.
            qchr = 'chr' + str(qn)
            path = "/at/%s/pt" % (qchr)
            for sn in range(5):
                name = 'chr' + str(sn)
                table = h5.create_table(path, name, IdxRecord, createparents=1)
                for column in (table.cols.f0, table.cols.f1, table.cols.f2):
                    column.create_index()
                table.row.append()
                table.flush()

    def tearDown(self):
        self.fileh.close()
        os.remove(self.file)
        cleanup(self)
class IndexPropsChangeTestCase(TempFileMixin, PyTablesTestCase):
    """Test case for changing index properties in a table."""

    class MyDescription(IsDescription):
        icol = IntCol()

    oldIndexProps = IndexProps()
    newIndexProps = IndexProps(auto=False, filters=Filters(complevel=9))

    def setUp(self):
        super(IndexPropsChangeTestCase, self).setUp()
        table = self.h5file.create_table('/', 'test', self.MyDescription)
        table.autoindex = self.oldIndexProps.auto
        row = table.row
        # 100 rows cycling through 25 distinct values.
        for n in xrange(100):
            row['icol'] = n % 25
            row.append()
        table.flush()
        self.table = table

    def tearDown(self):
        super(IndexPropsChangeTestCase, self).tearDown()

    def test_attributes(self):
        """Storing index properties as table attributes."""
        # Check each property set, switching to the new one after the check.
        for props in [self.oldIndexProps, self.newIndexProps]:
            self.assertEqual(self.table.autoindex, props.auto)
            self.table.autoindex = self.newIndexProps.auto

    def test_copyattrs(self):
        """Copying index properties attributes."""
        src = self.table
        dst = src.copy('/', 'test2')
        self.assertEqual(src.autoindex, dst.autoindex)
class IndexFiltersTestCase(TempFileMixin, PyTablesTestCase):
    """Test case for setting index filters."""

    def setUp(self):
        super(IndexFiltersTestCase, self).setUp()
        # Single integer column is enough to exercise index filters.
        description = {'icol': IntCol()}
        self.table = self.h5file.create_table('/', 'test', description)

    def test_createIndex(self):
        """Checking input parameters in new indexes."""
        # Different from default.
        argfilters = copy.copy(default_index_filters)
        argfilters.shuffle = not default_index_filters.shuffle
        # Different both from default and the previous one.
        # NOTE(review): `idxfilters` is built but never used below.
        idxfilters = copy.copy(default_index_filters)
        idxfilters.shuffle = not default_index_filters.shuffle
        idxfilters.fletcher32 = not default_index_filters.fletcher32
        icol = self.table.cols.icol
        # First create: no filters given, so defaults must be used.
        icol.create_index(kind='ultralight', optlevel=4)
        self.assertEqual(icol.index.kind, 'ultralight')
        self.assertEqual(icol.index.optlevel, 4)
        self.assertEqual(icol.index.filters, default_index_filters)
        icol.remove_index()
        # Second create: explicit filters must be honored.
        icol.create_index(kind='medium', optlevel=3, filters=argfilters)
        self.assertEqual(icol.index.kind, 'medium')
        self.assertEqual(icol.index.optlevel, 3)
        self.assertEqual(icol.index.filters, argfilters)
        icol.remove_index()

    def test_reindex(self):
        """Checking input parameters in recomputed indexes."""
        icol = self.table.cols.icol
        icol.create_index(
            kind='full', optlevel=5, filters=Filters(complevel=3))
        kind = icol.index.kind
        optlevel = icol.index.optlevel
        filters = icol.index.filters
        # Recompute the index; its parameters must be preserved.
        icol.reindex()
        ni = icol.index
        if verbose:
            print "Old parameters: %s, %s, %s" % (kind, optlevel, filters)
            print "New parameters: %s, %s, %s" % (
                ni.kind, ni.optlevel, ni.filters)
        self.assertEqual(ni.kind, kind)
        self.assertEqual(ni.optlevel, optlevel)
        self.assertEqual(ni.filters, filters)
class OldIndexTestCase(PyTablesTestCase):
    """Compatibility checks for files carrying PyTables 1.x indexes."""

    def test1_x(self):
        """Check that files with 1.x indexes are recognized and warned."""
        fname = self._testFilename("idx-std-1.x.h5")
        f = open_file(fname)
        try:
            # Accessing the node must emit OldIndexWarning.
            self.assertWarns(OldIndexWarning, f.get_node, "/table")
        finally:
            # Close even when the assertion fails; the original code leaked
            # the open file handle on failure.
            f.close()
# Sensible parameters for indexing with small blocksizes
# (presumably superblock/block/slice/chunk sizes, matching the tuple
# returned by calc_chunksize — TODO confirm)
small_blocksizes = (512, 128, 32, 8)
class CompletelySortedIndexTestCase(TempFileMixin, PyTablesTestCase):
"""Test case for testing a complete sort in a table."""
nrows = 100
nrowsinbuf = 11
class MyDescription(IsDescription):
rcol = IntCol(pos=1)
icol = IntCol(pos=2)
def setUp(self):
    super(CompletelySortedIndexTestCase, self).setUp()
    table = self.h5file.create_table('/', 'table', self.MyDescription)
    row = table.row
    nrows = self.nrows
    # 'rcol' ascends while 'icol' descends, so sorting on 'icol'
    # reverses the row order.
    for i in xrange(nrows):
        row['rcol'] = i
        row['icol'] = nrows - i
        row.append()
    table.flush()
    self.table = table
    self.icol = self.table.cols.icol
    # A full index with maximum optlevel should always be completely sorted
    self.icol.create_csindex(_blocksizes=small_blocksizes)
def test00_isCompletelySortedIndex(self):
    """Testing the Column.is_csi property."""
    icol = self.icol
    # The csindex created in setUp must report as completely sorted.
    self.assertEqual(icol.index.is_csi, True)
    icol.remove_index()
    # Other kinds than full, should never return a CSI
    icol.create_index(kind="medium", optlevel=9)
    self.assertEqual(icol.index.is_csi, False)
    icol.remove_index()
    # As the table is small, lesser optlevels should be able to
    # create a completely sorted index too.
    icol.create_index(kind="full", optlevel=6)
    self.assertEqual(icol.index.is_csi, True)
    # Checking a CSI in a sorted copy
    self.table.copy("/", 'table2', sortby='icol', checkCSI=True)
    self.assertEqual(icol.index.is_csi, True)
def test01_readSorted1(self):
"""Testing the Index.read_sorted() method with no arguments."""
icol = self.icol
sortedcol = numpy.sort(icol[:])
sortedcol2 = icol.index.read_sorted()
if verbose:
print "Original sorted column:", sortedcol
print "The values from the index:", sortedcol2
self.assertTrue(allequal(sortedcol, sortedcol2))
def test01_readSorted2(self):
"""Testing the Index.read_sorted() method with arguments (I)."""
icol = self.icol
sortedcol = numpy.sort(icol[:])[30:55]
sortedcol2 = icol.index.read_sorted(30, 55)
if verbose:
print "Original sorted column:", sortedcol
print "The values from the index:", sortedcol2
self.assertTrue(allequal(sortedcol, sortedcol2))
def test01_readSorted3(self):
"""Testing the Index.read_sorted() method with arguments (II)."""
icol = self.icol
sortedcol = numpy.sort(icol[:])[33:97]
sortedcol2 = icol.index.read_sorted(33, 97)
if verbose:
print "Original sorted column:", sortedcol
print "The values from the index:", sortedcol2
self.assertTrue(allequal(sortedcol, sortedcol2))
def test02_readIndices1(self):
"""Testing the Index.read_indices() method with no arguments."""
icol = self.icol
indicescol = numpy.argsort(icol[:]).astype('uint64')
indicescol2 = icol.index.read_indices()
if verbose:
print "Original indices column:", indicescol
print "The values from the index:", indicescol2
self.assertTrue(allequal(indicescol, indicescol2))
def test02_readIndices2(self):
"""Testing the Index.read_indices() method with arguments (I)."""
icol = self.icol
indicescol = numpy.argsort(icol[:])[30:55].astype('uint64')
indicescol2 = icol.index.read_indices(30, 55)
if verbose:
print "Original indices column:", indicescol
print "The values from the index:", indicescol2
self.assertTrue(allequal(indicescol, indicescol2))
def test02_readIndices3(self):
"""Testing the Index.read_indices() method with arguments (II)."""
icol = self.icol
indicescol = numpy.argsort(icol[:])[33:97].astype('uint64')
indicescol2 = icol.index.read_indices(33, 97)
if verbose:
print "Original indices column:", indicescol
print "The values from the index:", indicescol2
self.assertTrue(allequal(indicescol, indicescol2))
def test02_readIndices4(self):
"""Testing the Index.read_indices() method with arguments (III)."""
icol = self.icol
indicescol = numpy.argsort(icol[:])[33:97:2].astype('uint64')
indicescol2 = icol.index.read_indices(33, 97, 2)
if verbose:
print "Original indices column:", indicescol
print "The values from the index:", indicescol2
self.assertTrue(allequal(indicescol, indicescol2))
def test02_readIndices5(self):
"""Testing the Index.read_indices() method with arguments (IV)."""
icol = self.icol
indicescol = numpy.argsort(icol[:])[33:55:5].astype('uint64')
indicescol2 = icol.index.read_indices(33, 55, 5)
if verbose:
print "Original indices column:", indicescol
print "The values from the index:", indicescol2
self.assertTrue(allequal(indicescol, indicescol2))
def test02_readIndices6(self):
"""Testing the Index.read_indices() method with step only."""
icol = self.icol
indicescol = numpy.argsort(icol[:])[::3].astype('uint64')
indicescol2 = icol.index.read_indices(step=3)
if verbose:
print "Original indices column:", indicescol
print "The values from the index:", indicescol2
self.assertTrue(allequal(indicescol, indicescol2))
def test03_getitem1(self):
"""Testing the Index.__getitem__() method with no arguments."""
icol = self.icol
indicescol = numpy.argsort(icol[:]).astype('uint64')
indicescol2 = icol.index[:]
if verbose:
print "Original indices column:", indicescol
print "The values from the index:", indicescol2
self.assertTrue(allequal(indicescol, indicescol2))
def test03_getitem2(self):
"""Testing the Index.__getitem__() method with start."""
icol = self.icol
indicescol = numpy.argsort(icol[:])[31].astype('uint64')
indicescol2 = icol.index[31]
if verbose:
print "Original indices column:", indicescol
print "The values from the index:", indicescol2
self.assertTrue(allequal(indicescol, indicescol2))
def test03_getitem3(self):
"""Testing the Index.__getitem__() method with start, stop."""
icol = self.icol
indicescol = numpy.argsort(icol[:])[2:16].astype('uint64')
indicescol2 = icol.index[2:16]
if verbose:
print "Original indices column:", indicescol
print "The values from the index:", indicescol2
self.assertTrue(allequal(indicescol, indicescol2))
def test04_itersorted1(self):
"""Testing the Table.itersorted() method with no arguments."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')
sortedtable2 = numpy.array(
[row.fetch_all_fields() for row in table.itersorted(
'icol')], dtype=table._v_dtype)
if verbose:
print "Original sorted table:", sortedtable
print "The values from the iterator:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test04_itersorted2(self):
"""Testing the Table.itersorted() method with a start."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')[15:]
sortedtable2 = numpy.array(
[row.fetch_all_fields() for row in table.itersorted(
'icol', start=15)], dtype=table._v_dtype)
if verbose:
print "Original sorted table:", sortedtable
print "The values from the iterator:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test04_itersorted3(self):
"""Testing the Table.itersorted() method with a stop."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')[:20]
sortedtable2 = numpy.array(
[row.fetch_all_fields() for row in table.itersorted(
'icol', stop=20)], dtype=table._v_dtype)
if verbose:
print "Original sorted table:", sortedtable
print "The values from the iterator:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test04_itersorted4(self):
"""Testing the Table.itersorted() method with a start and stop."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')[15:20]
sortedtable2 = numpy.array(
[row.fetch_all_fields() for row in table.itersorted(
'icol', start=15, stop=20)], dtype=table._v_dtype)
if verbose:
print "Original sorted table:", sortedtable
print "The values from the iterator:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test04_itersorted5(self):
"""Testing the Table.itersorted() method with a start, stop and step."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')[15:45:4]
sortedtable2 = numpy.array(
[row.fetch_all_fields() for row in table.itersorted(
'icol', start=15, stop=45, step=4)], dtype=table._v_dtype)
if verbose:
print "Original sorted table:", sortedtable
print "The values from the iterator:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test04_itersorted6(self):
"""Testing the Table.itersorted() method with a start, stop and step."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')[33:55:5]
sortedtable2 = numpy.array(
[row.fetch_all_fields() for row in table.itersorted(
'icol', start=33, stop=55, step=5)], dtype=table._v_dtype)
if verbose:
print "Original sorted table:", sortedtable
print "The values from the iterator:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test04_itersorted7(self):
"""Testing the Table.itersorted() method with checkCSI=True."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')
sortedtable2 = numpy.array(
[row.fetch_all_fields() for row in table.itersorted(
'icol', checkCSI=True)], dtype=table._v_dtype)
if verbose:
print "Original sorted table:", sortedtable
print "The values from the iterator:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test04_itersorted8(self):
"""Testing the Table.itersorted() method with a start, stop and
negative step."""
# see also gh-252
table = self.table
sortedtable = numpy.sort(table[:], order='icol')[55:33:-5]
sortedtable2 = numpy.array(
[row.fetch_all_fields() for row in table.itersorted(
'icol', start=55, stop=33, step=-5)], dtype=table._v_dtype)
if verbose:
print "Original sorted table:", sortedtable
print "The values from the iterator:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test04_itersorted9(self):
"""Testing the Table.itersorted() method with a negative step."""
# see also gh-252
table = self.table
sortedtable = numpy.sort(table[:], order='icol')[::-5]
sortedtable2 = numpy.array(
[row.fetch_all_fields() for row in table.itersorted(
'icol', step=-5)], dtype=table._v_dtype)
if verbose:
print "Original sorted table:", sortedtable
print "The values from the iterator:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test05_readSorted1(self):
"""Testing the Table.read_sorted() method with no arguments."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')
sortedtable2 = table.read_sorted('icol')
if verbose:
print "Original sorted table:", sortedtable
print "The values from read_sorted:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test05_readSorted2(self):
"""Testing the Table.read_sorted() method with a start."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')[16:17]
sortedtable2 = table.read_sorted('icol', start=16)
if verbose:
print "Original sorted table:", sortedtable
print "The values from read_sorted:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test05_readSorted3(self):
"""Testing the Table.read_sorted() method with a start and stop."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')[16:33]
sortedtable2 = table.read_sorted('icol', start=16, stop=33)
if verbose:
print "Original sorted table:", sortedtable
print "The values from read_sorted:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test05_readSorted4(self):
"""Testing the Table.read_sorted() method with a start, stop and step."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')[33:55:5]
sortedtable2 = table.read_sorted('icol', start=33, stop=55, step=5)
if verbose:
print "Original sorted table:", sortedtable
print "The values from read_sorted:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test05_readSorted5(self):
"""Testing the Table.read_sorted() method with only a step."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')[::3]
sortedtable2 = table.read_sorted('icol', step=3)
if verbose:
print "Original sorted table:", sortedtable
print "The values from read_sorted:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test05_readSorted6(self):
"""Testing the Table.read_sorted() method with negative step."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')[::-1]
sortedtable2 = table.read_sorted('icol', step=-1)
if verbose:
print "Original sorted table:", sortedtable
print "The values from read_sorted:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test05_readSorted7(self):
"""Testing the Table.read_sorted() method with negative step (II)."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')[::-2]
sortedtable2 = table.read_sorted('icol', step=-2)
if verbose:
print "Original sorted table:", sortedtable
print "The values from read_sorted:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test05_readSorted8(self):
"""Testing the Table.read_sorted() method with negative step (III))."""
table = self.table
sstart = 100-24-1
sstop = 100-54-1
sortedtable = numpy.sort(table[:], order='icol')[sstart:sstop:-1]
sortedtable2 = table.read_sorted('icol', start=24, stop=54, step=-1)
if verbose:
print "Original sorted table:", sortedtable
print "The values from read_sorted:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test05_readSorted9(self):
"""Testing the Table.read_sorted() method with negative step (IV))."""
table = self.table
sstart = 100-14-1
sstop = 100-54-1
sortedtable = numpy.sort(table[:], order='icol')[sstart:sstop:-3]
sortedtable2 = table.read_sorted('icol', start=14, stop=54, step=-3)
if verbose:
print "Original sorted table:", sortedtable
print "The values from read_sorted:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test05_readSorted10(self):
"""Testing the Table.read_sorted() method with negative step (V))."""
table = self.table
sstart = 100-24-1
sstop = 100-25-1
sortedtable = numpy.sort(table[:], order='icol')[sstart:sstop:-2]
sortedtable2 = table.read_sorted('icol', start=24, stop=25, step=-2)
if verbose:
print "Original sorted table:", sortedtable
print "The values from read_sorted:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test05_readSorted11(self):
"""Testing the Table.read_sorted() method with start > stop."""
table = self.table
sstart = 100-137-1
sstop = 100-25-1
sortedtable = numpy.sort(table[:], order='icol')[sstart:sstop:-2]
sortedtable2 = table.read_sorted('icol', start=137, stop=25, step=-2)
if verbose:
print "Original sorted table:", sortedtable
print "The values from read_sorted:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test05a_readSorted12(self):
"""Testing the Table.read_sorted() method with checkCSI (I)."""
table = self.table
sortedtable = numpy.sort(table[:], order='icol')
sortedtable2 = table.read_sorted('icol', checkCSI=True)
if verbose:
print "Original sorted table:", sortedtable
print "The values from read_sorted:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test05b_readSorted12(self):
"""Testing the Table.read_sorted() method with checkCSI (II)."""
table = self.table
self.assertRaises(ValueError,
table.read_sorted, "rcol", checkCSI=False)
def test06_copy_sorted1(self):
"""Testing the Table.copy(sortby) method with no arguments."""
table = self.table
# Copy to another table
table.nrowsinbuf = self.nrowsinbuf
table2 = table.copy("/", 'table2', sortby="icol")
sortedtable = numpy.sort(table[:], order='icol')
sortedtable2 = table2[:]
if verbose:
print "Original sorted table:", sortedtable
print "The values from copy:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test06_copy_sorted2(self):
"""Testing the Table.copy(sortby) method with step=-1."""
table = self.table
# Copy to another table
table.nrowsinbuf = self.nrowsinbuf
table2 = table.copy("/", 'table2', sortby="icol", step=-1)
sortedtable = numpy.sort(table[:], order='icol')[::-1]
sortedtable2 = table2[:]
if verbose:
print "Original sorted table:", sortedtable
print "The values from copy:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test06_copy_sorted3(self):
"""Testing the Table.copy(sortby) method with only a start."""
table = self.table
# Copy to another table
table.nrowsinbuf = self.nrowsinbuf
table2 = table.copy("/", 'table2', sortby="icol", start=3)
sortedtable = numpy.sort(table[:], order='icol')[3:4]
sortedtable2 = table2[:]
if verbose:
print "Original sorted table:", sortedtable
print "The values from copy:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test06_copy_sorted4(self):
"""Testing the Table.copy(sortby) method with start, stop."""
table = self.table
# Copy to another table
table.nrowsinbuf = self.nrowsinbuf
table2 = table.copy("/", 'table2', sortby="icol", start=3, stop=40)
sortedtable = numpy.sort(table[:], order='icol')[3:40]
sortedtable2 = table2[:]
if verbose:
print "Original sorted table:", sortedtable
print "The values from copy:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test06_copy_sorted5(self):
"""Testing the Table.copy(sortby) method with start, stop, step."""
table = self.table
# Copy to another table
table.nrowsinbuf = self.nrowsinbuf
table2 = table.copy("/", 'table2', sortby="icol",
start=3, stop=33, step=5)
sortedtable = numpy.sort(table[:], order='icol')[3:33:5]
sortedtable2 = table2[:]
if verbose:
print "Original sorted table:", sortedtable
print "The values from copy:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test06_copy_sorted6(self):
"""Testing the Table.copy(sortby) method after table re-opening."""
self._reopen(mode='a')
table = self.h5file.root.table
# Copy to another table
table.nrowsinbuf = self.nrowsinbuf
table2 = table.copy("/", 'table2', sortby="icol")
sortedtable = numpy.sort(table[:], order='icol')
sortedtable2 = table2[:]
if verbose:
print "Original sorted table:", sortedtable
print "The values from copy:", sortedtable2
self.assertTrue(allequal(sortedtable, sortedtable2))
def test06_copy_sorted7(self):
"""Testing the `checkCSI` parameter of Table.copy() (I)."""
table = self.table
# Copy to another table
table.nrowsinbuf = self.nrowsinbuf
table2 = table.copy("/", 'table2', sortby="icol")
self.assertRaises(ValueError,
table2.copy, "/", 'table3',
sortby="rcol", checkCSI=False)
def test06_copy_sorted8(self):
"""Testing the `checkCSI` parameter of Table.copy() (II)."""
table = self.table
# Copy to another table
table.nrowsinbuf = self.nrowsinbuf
table2 = table.copy("/", 'table2', sortby="icol")
self.assertRaises(ValueError,
table2.copy, "/", 'table3',
sortby="rcol", checkCSI=True)
def test07_isCSI_noelements(self):
"""Testing the representation of an index with no elements."""
t2 = self.h5file.create_table('/', 't2', self.MyDescription)
irows = t2.cols.rcol.create_csindex()
if verbose:
print "repr(t2)-->\n", repr(t2)
self.assertEqual(irows, 0)
self.assertEqual(t2.colindexes['rcol'].is_csi, False)
class ReadSortedIndexTestCase(TempFileMixin, PyTablesTestCase):
    """Test case for testing sorted reading in a "full" sorted column.

    Unlike ``CompletelySortedIndexTestCase`` the index is built with a
    subclass-provided ``optlevel`` (see ``ReadSortedIndex0``..``9``), so
    the index is not guaranteed to be a CSI.
    """

    nrows = 100        # rows appended to the test table
    nrowsinbuf = 11    # deliberately small I/O buffer to exercise chunking

    class MyDescription(IsDescription):
        # ``rcol`` is stored already sorted; ``icol`` is stored reversed.
        rcol = IntCol(pos=1)
        icol = IntCol(pos=2)

    def setUp(self):
        """Create a table with a reversed ``icol`` and a "full" index on it."""
        super(ReadSortedIndexTestCase, self).setUp()
        table = self.h5file.create_table('/', 'table', self.MyDescription)
        row = table.row
        nrows = self.nrows
        for i in xrange(nrows):
            row['rcol'] = i
            row['icol'] = nrows - i
            row.append()
        table.flush()
        self.table = table
        self.icol = self.table.cols.icol
        # A full index with maximum optlevel should always be completely sorted
        self.icol.create_index(optlevel=self.optlevel, kind="full",
                               _blocksizes=small_blocksizes)

    def test01_readSorted1(self):
        """Testing the Table.read_sorted() method with no arguments."""
        table = self.table
        sortedtable = numpy.sort(table[:], order='icol')
        sortedtable2 = table.read_sorted('icol')
        if verbose:
            print "Sorted table:", sortedtable
            print "The values from read_sorted:", sortedtable2
        # Compare with the sorted read table because we have no
        # guarantees that read_sorted returns a completely sorted table
        self.assertTrue(allequal(sortedtable,
                                 numpy.sort(sortedtable2, order="icol")))

    def test01_readSorted2(self):
        """Testing the Table.read_sorted() method with no arguments (re-open)."""
        self._reopen()
        table = self.h5file.root.table
        sortedtable = numpy.sort(table[:], order='icol')
        sortedtable2 = table.read_sorted('icol')
        if verbose:
            print "Sorted table:", sortedtable
            print "The values from read_sorted:", sortedtable2
        # Compare with the sorted read table because we have no
        # guarantees that read_sorted returns a completely sorted table
        self.assertTrue(allequal(sortedtable,
                                 numpy.sort(sortedtable2, order="icol")))

    def test02_copy_sorted1(self):
        """Testing the Table.copy(sortby) method."""
        table = self.table
        # Copy to another table
        table.nrowsinbuf = self.nrowsinbuf
        table2 = table.copy("/", 'table2', sortby="icol")
        sortedtable = numpy.sort(table[:], order='icol')
        sortedtable2 = numpy.sort(table2[:], order='icol')
        if verbose:
            print "Original table:", table2[:]
            print "The sorted values from copy:", sortedtable2
        self.assertTrue(allequal(sortedtable, sortedtable2))

    def test02_copy_sorted2(self):
        """Testing the Table.copy(sortby) method after table re-opening."""
        self._reopen(mode='a')
        table = self.h5file.root.table
        # Copy to another table
        table.nrowsinbuf = self.nrowsinbuf
        table2 = table.copy("/", 'table2', sortby="icol")
        sortedtable = numpy.sort(table[:], order='icol')
        sortedtable2 = numpy.sort(table2[:], order='icol')
        if verbose:
            print "Original table:", table2[:]
            print "The sorted values from copy:", sortedtable2
        self.assertTrue(allequal(sortedtable, sortedtable2))
class ReadSortedIndex0(ReadSortedIndexTestCase):
    # "full" index built with no optimization at all.
    optlevel = 0
class ReadSortedIndex3(ReadSortedIndexTestCase):
    # "full" index built with a light optimization level.
    optlevel = 3
class ReadSortedIndex6(ReadSortedIndexTestCase):
    # "full" index built with a medium optimization level.
    optlevel = 6
class ReadSortedIndex9(ReadSortedIndexTestCase):
    # "full" index built with the maximum optimization level.
    optlevel = 9
class Issue156TestBase(PyTablesTestCase):
    """Regression test base for gh-156: sorted copy with index propagation.

    Subclasses set ``sort_field`` to either a top-level or a nested column.
    """

    # field name in table according to which test_copysort() sorts the table
    sort_field = None

    def setUp(self):
        """Build a temporary HDF5 file with a 10-row nested table."""
        # create hdf5 file
        self.filename = tempfile.mktemp(".hdf5")
        self.file = open_file(self.filename, mode="w")

        # create nested table
        class Foo(IsDescription):
            frame = UInt16Col()

            class Bar(IsDescription):
                code = UInt16Col()

        table = self.file.create_table('/', 'foo', Foo,
                                       filters=Filters(3, 'zlib'),
                                       createparents=True)
        self.file.flush()
        # fill table with 10 random numbers
        for k in xrange(10):
            row = table.row
            row['frame'] = numpy.random.random_integers(0, 2**16-1)
            row['Bar/code'] = numpy.random.random_integers(0, 2**16-1)
            row.append()
        self.file.flush()

    def tearDown(self):
        # Close and delete the temporary file created in setUp().
        self.file.close()
        os.remove(self.filename)

    def test_copysort(self):
        """Copy sorted by ``sort_field`` and check order + index propagation."""
        # copy table
        oldNode = self.file.get_node('/foo')
        # create completely sorted index on a main column
        oldNode.colinstances[self.sort_field].create_csindex()
        # this fails on ade2ba123efd267fd31
        # see gh-156
        new_node = oldNode.copy(newname='foo2', overwrite=True,
                                sortby=self.sort_field, checkCSI=True,
                                propindexes=True)
        # check column is sorted
        self.assertTrue(numpy.all(
            new_node.col(self.sort_field) == sorted(oldNode.col(self.sort_field))))
        # check index is available
        self.assertTrue(self.sort_field in new_node.colindexes)
        # check CSI was propagated
        self.assertTrue(new_node.colindexes[self.sort_field].is_csi)
class Issue156TestCase01(Issue156TestBase):
    # sort by field from non nested entry
    sort_field = 'frame'
class Issue156TestCase02(Issue156TestBase):
    # sort by field from nested entry
    sort_field = 'Bar/code'
class Issue119Time32ColTestCase(PyTablesTestCase):
    """ TimeCol not properly indexing """

    # Time column flavour under test (overridden by the Time64 subclass).
    col_typ = Time32Col
    # Arbitrary float payloads stored next to the timestamps.
    values = [
        0.93240451618785880,
        0.76322375510776170,
        0.16695030056300875,
        0.91259117097807850,
        0.93977847053454630,
        0.51450406513503090,
        0.24452129962257563,
        0.85475938924825230,
        0.32512326762476930,
        0.75127635627046820,
    ]

    def setUp(self):
        """Build a temporary file with one (time, value) row per item."""
        # create hdf5 file
        self.filename = tempfile.mktemp(".hdf5")
        self.file = open_file(self.filename, mode="w")

        class Descr(IsDescription):
            when = self.col_typ(pos = 1)
            value = Float32Col(pos = 2)

        self.table = self.file.create_table('/', 'test', Descr)

        self.t = 1321031471.0  # 11/11/11 11:11:11
        # Consecutive timestamps starting at self.t, one per value.
        data = [(self.t + i, item) for i, item in enumerate(self.values)]
        self.table.append(data)
        self.file.flush()

    def tearDown(self):
        # Close and delete the temporary file created in setUp().
        self.file.close()
        os.remove(self.filename)

    def test_timecol_issue(self):
        """The same where-query must match with and without an index."""
        tbl = self.table
        t = self.t

        wherestr = '(when >= %d) & (when < %d)'%(t, t+5)

        no_index = tbl.read_where(wherestr)
        tbl.cols.when.create_index(_verbose = False)
        with_index = tbl.read_where(wherestr)
        self.assertTrue((no_index == with_index).all())
class Issue119Time64ColTestCase(Issue119Time32ColTestCase):
    # Same gh-119 scenario, but with 64-bit time columns.
    col_typ = Time64Col
#----------------------------------------------------------------------
def suite():
    """Build and return the complete test suite for this module."""
    theSuite = unittest.TestSuite()
    niter = 1
    # heavy = 1  # Uncomment this only for testing purposes!

    # Cases that always run, in registration order.
    light_cases = (
        BasicReadTestCase,
        ZlibReadTestCase,
        BloscReadTestCase,
        LZOReadTestCase,
        Bzip2ReadTestCase,
        ShuffleReadTestCase,
        Fletcher32ReadTestCase,
        ShuffleFletcher32ReadTestCase,
        OneHalfTestCase,
        UpperBoundTestCase,
        LowerBoundTestCase,
        AI1TestCase,
        AI2TestCase,
        AI9TestCase,
        DeepTableIndexTestCase,
        IndexPropsChangeTestCase,
        IndexFiltersTestCase,
        OldIndexTestCase,
        CompletelySortedIndexTestCase,
        ManyNodesTestCase,
        ReadSortedIndex0,
        ReadSortedIndex3,
        ReadSortedIndex6,
        ReadSortedIndex9,
        Issue156TestCase01,
        Issue156TestCase02,
        Issue119Time32ColTestCase,
        Issue119Time64ColTestCase,
    )
    # These are too heavy for normal testing.
    heavy_cases = (
        AI4bTestCase,
        AI5TestCase,
        AI6TestCase,
        AI7TestCase,
        AI8TestCase,
        AI10TestCase,
        AI11TestCase,
        AI12TestCase,
    )

    for _ in range(niter):
        for case in light_cases:
            theSuite.addTest(unittest.makeSuite(case))
        if heavy:
            for case in heavy_cases:
                theSuite.addTest(unittest.makeSuite(case))
    return theSuite
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    unittest.main(defaultTest='suite')
| 38.298691
| 88
| 0.600481
| 10,902
| 96,551
| 5.234452
| 0.063016
| 0.022868
| 0.031157
| 0.01451
| 0.789752
| 0.756352
| 0.732678
| 0.703904
| 0.669278
| 0.628255
| 0
| 0.029064
| 0.290841
| 96,551
| 2,520
| 89
| 38.313889
| 0.804381
| 0.069911
| 0
| 0.673763
| 0
| 0
| 0.103459
| 0.006953
| 0
| 0
| 0
| 0
| 0.125067
| 0
| null | null | 0
| 0.005322
| null | null | 0.159127
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f8f0f8244d81c561653a797d04424b2fb4a21c5d
| 94,209
|
py
|
Python
|
test/fixtures/vectors.py
|
FuzzyBearBTC/counterpartyd
|
0733463249b71901daba5d73c7b30a334860bb0a
|
[
"MIT"
] | 1
|
2020-10-15T06:42:34.000Z
|
2020-10-15T06:42:34.000Z
|
test/fixtures/vectors.py
|
FuzzyBearBTC/counterpartyd
|
0733463249b71901daba5d73c7b30a334860bb0a
|
[
"MIT"
] | null | null | null |
test/fixtures/vectors.py
|
FuzzyBearBTC/counterpartyd
|
0733463249b71901daba5d73c7b30a334860bb0a
|
[
"MIT"
] | null | null | null |
from .params import ADDR, MULTISIGADDR, DEFAULT_PARAMS as DP
UNITTEST_VECTOR = {
'burn': {
'validate': [{
'in': (ADDR[0], DP['unspendable'], DP['burn_quantity'], DP['burn_start']),
'out': ([])
}, {
'in': (ADDR[0], DP['unspendable'], 1.1 * DP['burn_quantity'], DP['burn_start']),
'out': (['quantity must be in satoshis'])
}, {
'in': (ADDR[0], ADDR[1], DP['burn_quantity'], DP['burn_start']),
'out': (['wrong destination address'])
}, {
'in': (ADDR[0], DP['unspendable'], -1 * DP['burn_quantity'], DP['burn_start']),
'out': (['negative quantity'])
}, {
'in': (ADDR[0], DP['unspendable'], DP['burn_quantity'], DP['burn_start'] - 2),
'out': (['too early'])
}, {
'in': (ADDR[0], DP['unspendable'], DP['burn_quantity'], DP['burn_end'] + 1),
'out': (['too late'])
}, {
'in': (ADDR[0], ADDR[1], 1.1 * DP['burn_quantity'], DP['burn_start'] - 2),
'out': (['wrong destination address', 'quantity must be in satoshis'])
}, {
'in': (ADDR[0], ADDR[1], DP['burn_quantity'], DP['burn_start'] - 2),
'out': (['wrong destination address', 'too early'])
}, {
'in': (MULTISIGADDR[0], DP['unspendable'], DP['burn_quantity'], DP['burn_start']),
'out': ([])
}],
'compose': [{
'in': (ADDR[1], DP['burn_quantity']),
'out': ('mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', [('mvCounterpartyXXXXXXXXXXXXXXW24Hef', 62000000)], None)
}, {
'in': (ADDR[0], DP['burn_quantity']),
'error': ('ComposeError', '1 BTC may be burned per address')
}, {
'in': (MULTISIGADDR[0], int(DP['quantity'] / 2)),
'out': ('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', [('mvCounterpartyXXXXXXXXXXXXXXW24Hef', 50000000)], None)
}],
'parse': [{
'in': ({'block_index': DP['default_block'], 'destination': 'mvCounterpartyXXXXXXXXXXXXXXW24Hef', 'fee': 10000, 'block_time': 1554090000000, 'supported': 1, 'btc_amount': 62000000, 'data': b'', 'source': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'tx_index': 502, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8'},),
'records': [
{'table': 'burns', 'values': {'tx_index': 502, 'earned': 92995811159, 'burned': 62000000, 'source': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'status': 'valid', 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'block_index': DP['default_block']}},
{'table': 'credits', 'values': {'block_index': DP['default_block'], 'address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'asset': 'XCP', 'calling_function': 'burn', 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'quantity': 92995811159}}
]
}, {
'in': ({'supported': 1, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'btc_amount': 50000000, 'block_index': DP['default_block'], 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'fee': 10000, 'data': b'', 'block_time': 1554090000000, 'source': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'tx_index': 502, 'destination': 'mvCounterpartyXXXXXXXXXXXXXXW24Hef'},),
'records': [
{'table': 'burns', 'values': {'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'block_index': DP['default_block'], 'burned': 50000000, 'status': 'valid', 'source': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'earned': 74996621902, 'tx_index': 502}},
{'table': 'credits', 'values': {'address': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'block_index': DP['default_block'], 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'asset': 'XCP', 'calling_function': 'burn', 'quantity': 74996621902}}
]
}],
},
'send': {
'validate': [{
'in': (ADDR[0], ADDR[1], 'XCP', DP['quantity'], 1),
'out': ([])
}, {
'in': (ADDR[0], ADDR[1], 'BTC', DP['quantity'], 1),
'out': (['cannot send bitcoins'])
}, {
'in': (ADDR[0], ADDR[1], 'XCP', DP['quantity'] / 3, 1),
'out': (['quantity must be in satoshis'])
}, {
'in': (ADDR[0], ADDR[1], 'XCP', -1 * DP['quantity'], 1),
'out': (['negative quantity'])
}, {
'in': (ADDR[0], MULTISIGADDR[0], 'XCP', DP['quantity'], 1),
'out': ([])
}, {
'in': (ADDR[0], ADDR[1], 'MAXI', 2**63 - 1, 1),
'out': ([])
}, {
'in': (ADDR[0], ADDR[1], 'MAXI', 2**63 + 1, 1),
'out': ([])
}],
'compose': [{
'in': (ADDR[0], ADDR[1], 'XCP', DP['small']),
'out': ('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [('mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', None)], b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x02\xfa\xf0\x80')
}, {
'in': (ADDR[0], ADDR[1], 'XCP', DP['quantity'] * 10000000),
'error': ('ComposeError', 'insufficient funds')
}, {
'in': (ADDR[0], ADDR[1], 'XCP', DP['quantity'] / 3),
'error': ('ComposeError', 'quantity must be an int (in satoshi)')
}, {
'in': (ADDR[0], MULTISIGADDR[0], 'XCP', DP['quantity']),
'out': ('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', None)], b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00')
}, {
'in': (MULTISIGADDR[0], ADDR[0], 'XCP', DP['quantity']),
'out': ('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', [('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', None)], b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00')
}, {
'in': (MULTISIGADDR[0], MULTISIGADDR[1], 'XCP', DP['quantity']),
'out': ('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', [('1_mnfAHmddVibnZNSkh8DvKaQoiEfNsxjXzH_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', None)], b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00')
}, {
'in': (ADDR[0], ADDR[1], 'MAXI', 2**63 - 1),
'out': ('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [('mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', None)], b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03:>\x7f\xff\xff\xff\xff\xff\xff\xff')
}, {
'in': (ADDR[0], ADDR[1], 'MAXI', 2**63 + 1),
'error': ('ComposeError', 'insufficient funds')
}],
'parse': [{
'in': ({'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'supported': 1, 'block_index': DP['default_block'], 'fee': 10000, 'block_time': 1554090000000, 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'btc_amount': 7800, 'data': b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00', 'tx_index': 502, 'destination': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns'},),
'records': [
{'table': 'sends', 'values': {'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'block_index': DP['default_block'], 'status': 'valid', 'asset': 'XCP', 'quantity': 100000000, 'tx_index': 502, 'destination': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns'}},
{'table': 'credits', 'values': {'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'block_index': DP['default_block'], 'calling_function': 'send', 'asset': 'XCP', 'address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'quantity': 100000000}},
{'table': 'debits', 'values': {'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'block_index': DP['default_block'], 'asset': 'XCP', 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'action': 'send', 'quantity': 100000000}}
]
}, {
'in': ({'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'btc_amount': 7800, 'block_index': DP['default_block'], 'destination': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'data': b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x0b\xeb\xc2\x00', 'block_time': 1554090000000, 'fee': 10000, 'source': 'mnfAHmddVibnZNSkh8DvKaQoiEfNsxjXzH', 'tx_index': 502, 'supported': 1},),
'records': [
{'table': 'sends', 'values': {'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'quantity': 200000000, 'block_index': DP['default_block'], 'destination': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'asset': 'XCP', 'source': 'mnfAHmddVibnZNSkh8DvKaQoiEfNsxjXzH', 'tx_index': 502, 'status': 'invalid: insufficient funds'}}
]
}, {
'in': ({'block_index': DP['default_block'], 'btc_amount': 7800, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'block_time': 1554090000000, 'fee': 10000, 'tx_index': 502, 'data': b'\x00\x00\x00\x00\x00\x06\xca\xd8\xdc\x7f\x0bf\x00\x00\x00\x00\x00\x00\x01\xf4', 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'destination': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'supported': 1},),
'records': [
{'table': 'sends', 'values': {'block_index': DP['default_block'], 'quantity': 500, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'tx_index': 502, 'asset': 'NODIVISIBLE', 'status': 'valid', 'destination': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc'}},
{'table': 'credits', 'values': {'block_index': DP['default_block'], 'address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'calling_function': 'send', 'quantity': 500, 'asset': 'NODIVISIBLE'}},
{'table': 'debits', 'values': {'block_index': DP['default_block'], 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'action': 'send', 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'quantity': 500, 'asset': 'NODIVISIBLE'}}
]
}, {
'in': ({'btc_amount': 7800, 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'fee': 10000, 'tx_index': 502, 'destination': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'data': b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00', 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'supported': 1, 'block_time': 1554090000000, 'block_index': DP['default_block']},),
'records': [
{'table': 'sends', 'values': {'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'destination': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'asset': 'XCP', 'quantity': 100000000, 'tx_index': 502, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'status': 'valid', 'block_index': DP['default_block']}},
{'table': 'credits', 'values': {'address': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'quantity': 100000000, 'asset': 'XCP', 'block_index': DP['default_block'], 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'calling_function': 'send'}},
{'table': 'debits', 'values': {'action': 'send', 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'quantity': 100000000, 'asset': 'XCP', 'block_index': DP['default_block'], 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d'}}
]
}, {
'in': ({'data': b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00', 'source': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'supported': 1, 'block_time': 1554090000000, 'fee': 10000, 'tx_index': 502, 'btc_amount': 7800, 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'block_index': DP['default_block'], 'destination': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc'},),
'records': [
{'table': 'sends', 'values': {'quantity': 100000000, 'source': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'status': 'valid', 'destination': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'asset': 'XCP', 'tx_index': 502, 'block_index': DP['default_block']}},
{'table': 'credits', 'values': {'quantity': 100000000, 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'calling_function': 'send', 'asset': 'XCP', 'block_index': DP['default_block'], 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc'}},
{'table': 'debits', 'values': {'quantity': 100000000, 'action': 'send', 'asset': 'XCP', 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'block_index': DP['default_block'], 'address': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2'}}
]
}, {
'in': ({'source': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'destination': '1_mnfAHmddVibnZNSkh8DvKaQoiEfNsxjXzH_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'supported': 1, 'block_time': 1554090000000, 'fee': 10000, 'block_index': DP['default_block'], 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'btc_amount': 7800, 'data': b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00', 'tx_index': 502, 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8'},),
'records': [
{'table': 'sends', 'values': {'source': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'destination': '1_mnfAHmddVibnZNSkh8DvKaQoiEfNsxjXzH_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'asset': 'XCP', 'status': 'valid', 'block_index': DP['default_block'], 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'quantity': 100000000, 'tx_index': 502}},
{'table': 'credits', 'values': {'asset': 'XCP', 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'quantity': 100000000, 'address': '1_mnfAHmddVibnZNSkh8DvKaQoiEfNsxjXzH_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'block_index': DP['default_block'], 'calling_function': 'send'}},
{'table': 'debits', 'values': {'asset': 'XCP', 'action': 'send', 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'quantity': 100000000, 'address': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'block_index': DP['default_block']}}
]
}, {
'in': ({'block_index': DP['default_block'], 'block_time': 1554090000000, 'fee': 10000, 'tx_index': 502, 'tx_hash': '8fc698cf1fcd51e3d685511185c67c0a73e7b72954c6abbd29fbbbe560e043a0', 'btc_amount': 7800, 'data': b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03:>\x7f\xff\xff\xff\xff\xff\xff\xff', 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'destination': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'supported': 1, 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8'},),
'records': [
{'table': 'sends', 'values': {'block_index': DP['default_block'], 'destination': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'tx_hash': '8fc698cf1fcd51e3d685511185c67c0a73e7b72954c6abbd29fbbbe560e043a0', 'quantity': 9223372036854775807, 'asset': 'MAXI', 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'status': 'valid', 'tx_index': 502}},
{'table': 'credits', 'values': {'block_index': DP['default_block'], 'asset': 'MAXI', 'quantity': 9223372036854775807, 'address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'calling_function': 'send', 'event': '8fc698cf1fcd51e3d685511185c67c0a73e7b72954c6abbd29fbbbe560e043a0'}},
{'table': 'debits', 'values': {'block_index': DP['default_block'], 'action': 'send', 'asset': 'MAXI', 'quantity': 9223372036854775807, 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'event': '8fc698cf1fcd51e3d685511185c67c0a73e7b72954c6abbd29fbbbe560e043a0'}}
]
}]
},
'issuance': {
'validate': [{
'in': (ADDR[0], None, 'ASSET', 1000, True, False, None, None, '', DP['default_block']),
'out': (0, 0.0, [], 50000000, '', True)
}, {
'in': (ADDR[0], None, 'BTC', 1000, True, False, None, None, '', DP['default_block']),
'out': (0, 0.0, ['cannot issue BTC or XCP'], 50000000, '', True)
}, {
'in': (ADDR[0], None, 'XCP', 1000, True, False, None, None, '', DP['default_block']),
'out': (0, 0.0, ['cannot issue BTC or XCP'], 50000000, '', True)
}, {
'in': (ADDR[0], None, 'NOSATOSHI', 1000.5, True, False, None, None, '', DP['default_block']),
'out': (0, 0.0, ['quantity must be in satoshis'], 0, '', True)
}, {
'in': (ADDR[0], None, 'INVALIDCALLDATE', 1000, True, True, 10000.5, DP['quantity'], '', DP['default_block']),
'out': (10000.5, 100000000.0, ['call_date must be epoch integer'], 0, '', True)
}, {
'in': (ADDR[0], None, 'INVALIDCALLPRICE', 1000, True, True, 1409401723, 'price', '', DP['default_block']),
'out': (1409401723, 'price', ['call_price must be a float'], 0, '', True)
}, {
'in': (ADDR[0], None, 'NEGVALUES', -1000, True, True, -1409401723, -DP['quantity'], '', DP['default_block']),
'out': (-1409401723, -100000000.0, ['negative quantity', 'negative call price', 'negative call date'], 50000000, '', True)
}, {
'in': (ADDR[2], None, 'DIVISIBLE', 1000, True, False, None, None, 'Divisible asset', DP['default_block']),
'out': (0, 0.0, ['issued by another address'], 0, 'Divisible asset', True)
}, {
'in': (ADDR[0], None, 'DIVISIBLE', 1000, False, True, 1409401723, DP['quantity'], 'Divisible asset', DP['default_block']),
'out': (1409401723, 100000000.0, ['cannot change divisibility', 'cannot change callability'], 0, 'Divisible asset', False)
}, {
'in': (ADDR[0], None, 'CALLABLE', 1000, True, True, 1409400251, DP['quantity'] / 2, 'Callable asset', DP['default_block']),
'out': (1409400251, 50000000.0, ['cannot reduce call price'], 0, 'Callable asset', True)
}, {
'in': (ADDR[0], None, 'LOCKED', 1000, True, False, None, None, 'Locked asset', DP['default_block']),
'out': (0, 0.0, ['locked asset and non‐zero quantity'], 0, 'Locked asset', True)
}, {
'in': (ADDR[0], None, 'BSSET', 1000, True, False, None, None, 'LOCK', DP['default_block']),
'out': (0, 0.0, ['cannot lock a non‐existent asset'], 50000000, 'LOCK', True)
}, {
'in': (ADDR[0], ADDR[1], 'BSSET', 1000, True, False, None, None, '', DP['default_block']),
'out': (0, 0.0, ['cannot transfer a non‐existent asset', 'cannot issue and transfer simultaneously'], 50000000, '', True)
}, {
'in': (ADDR[2], None, 'BSSET', 1000, True, False, None, None, '', DP['default_block']),
'out': (0, 0.0, ['insufficient funds'], 50000000, '', True)
}, {
'in': (ADDR[0], None, 'BSSET', 2**63, True, False, None, None, '', DP['default_block']),
'out': (0, 0.0, ['total quantity overflow'], 50000000, '', True)
}, {
'in': (ADDR[0], ADDR[1], 'DIVISIBLE', 1000, True, False, None, None, 'Divisible asset', DP['default_block']),
'out': (0, 0.0, ['cannot issue and transfer simultaneously'], 0, 'Divisible asset', True)
}, {
'in': (ADDR[0], None, 'MAXIMUM', 2**63-1, True, False, None, None, 'Maximum quantity', DP['default_block']),
'out': (0, 0.0, [], 50000000, 'Maximum quantity', True)
}, {
'in': (ADDR[0], None, 'DIVISIBLE', 2**63-1, True, False, None, None, 'Maximum quantity', DP['default_block']),
'out': (0, 0.0, ['total quantity overflow'], 0, 'Maximum quantity', True)
}],
'compose': [{
'in': (ADDR[0], None, 'ASSET', 1000, True, False, None, None, ''),
'error': ('AssetNameError', 'non‐numeric asset name starts with ‘A’')
}, {
'in': (ADDR[0], None, 'BSSET1', 1000, True, False, None, None, ''),
'error': ('AssetNameError', "('invalid character:', '1')")
}, {
'in': (ADDR[0], None, 'SET', 1000, True, False, None, None, ''),
'error': ('AssetNameError', 'too short')
}, {
'in': (ADDR[0], None, 'BSSET', 1000, True, False, None, None, ''),
'out': ('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x00\x14\x00\x00\x00\x00\x00\x0b\xfc\xe3\x00\x00\x00\x00\x00\x00\x03\xe8\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
}, {
'in': (ADDR[0], ADDR[1], 'DIVISIBLE', 0, True, False, None, None, ''),
'out': ('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [('mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', None)], b'\x00\x00\x00\x14\x00\x00\x00\xa2[\xe3Kf\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
}, {
'in': (MULTISIGADDR[0], None, 'BSSET', 1000, True, False, None, None, ''),
'out': ('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', [], b'\x00\x00\x00\x14\x00\x00\x00\x00\x00\x0b\xfc\xe3\x00\x00\x00\x00\x00\x00\x03\xe8\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
}, {
'in': (ADDR[0], MULTISIGADDR[0], 'DIVISIBLE', 0, True, False, None, None, ''),
'out': ('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', None)], b'\x00\x00\x00\x14\x00\x00\x00\xa2[\xe3Kf\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
}, {
'in': (ADDR[0], None, 'MAXIMUM', 2**63-1, True, False, None, None, 'Maximum quantity'),
'out': ('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x00\x14\x00\x00\x00\x00\xdd\x96\xd2t\x7f\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10Maximum quantity')
}, {
'in': (ADDR[0], None, 'A{}'.format(2**64 - 1), 1000, None, None, None, None, None),
'out': ('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x00\x14\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x03\xe8\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
}, {
'in': (ADDR[0], None, 'A{}'.format(2**64), 1000, True, False, None, None, ''),
'error': ('AssetNameError', 'numeric asset name not in range')
}, {
'in': (ADDR[0], None, 'A{}'.format(26**12), 1000, True, False, None, None, ''),
'error': ('AssetNameError', 'numeric asset name not in range')
}],
'parse': [{
'in': ({'supported': 1, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'data': b'\x00\x00\x00\x14\x00\x00\x00\x00\x00\xbaOs\x00\x00\x00\x00\x00\x00\x03\xe8\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', 'btc_amount': None, 'destination': None, 'block_time': 1554090000000, 'block_index': DP['default_block'], 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'fee': 10000, 'tx_index': 502, 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8'},),
'records': [
{'table': 'issuances', 'values': {'locked': 0, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'call_price': 0.0, 'description': '', 'divisible': 1, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'asset': 'BASSET', 'issuer': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'call_date': 0, 'callable': 0, 'status': 'valid', 'tx_index': 502, 'fee_paid': 50000000, 'block_index': DP['default_block'], 'transfer': 0, 'quantity': 1000}},
{'table': 'credits', 'values': {'calling_function': 'issuance', 'block_index': DP['default_block'], 'asset': 'BASSET', 'quantity': 1000, 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d'}},
{'table': 'debits', 'values': {'block_index': DP['default_block'], 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'asset': 'XCP', 'quantity': 50000000, 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'action': 'issuance fee'}}
]
}, {
'in': ({'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'block_time': 1554090000000, 'btc_amount': 7800, 'supported': 1, 'tx_index': 502, 'block_index': DP['default_block'], 'data': b'\x00\x00\x00\x14\x00\x00\x00\xa2[\xe3Kf\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'fee': 10000, 'destination': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns'},),
'records': [
{'table': 'issuances', 'values': {'locked': 0, 'call_date': 0, 'block_index': DP['default_block'], 'description': '', 'quantity': 0, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'asset': 'DIVISIBLE', 'callable': 0, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'fee_paid': 0, 'issuer': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'tx_index': 502, 'transfer': 1, 'call_price': 0.0, 'divisible': 1, 'status': 'valid'}}
]
}, {
'in': ({'tx_index': 502, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'data': b'\x00\x00\x00\x14\x00\x00\x00\xa2[\xe3Kf\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04LOCK', 'block_time': 1554090000000, 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'fee': 10000, 'destination': None, 'supported': 1, 'block_index': DP['default_block'], 'btc_amount': None},),
'records': [
{'table': 'issuances', 'values': {'tx_index': 502, 'quantity': 0, 'block_index': DP['default_block'], 'callable': 0, 'status': 'valid', 'locked': 1, 'description': 'Divisible asset', 'divisible': 1, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'transfer': 0, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'call_price': 0.0, 'issuer': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'call_date': 0, 'fee_paid': 0, 'asset': 'DIVISIBLE'}}
]
}, {
'in': ({'data': b'\x00\x00\x00\x14\x00\x00\x00\x00\x00\x0b\xfc\xe3\x00\x00\x00\x00\x00\x00\x03\xe8\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', 'supported': 1, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'block_index': DP['default_block'], 'destination': '', 'source': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'btc_amount': 0, 'tx_index': 502, 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'block_time': 1554090000000, 'fee': 10000},),
'records': [
{'table': 'issuances', 'values': {'issuer': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'asset': 'BSSET', 'call_date': 0, 'call_price': 0.0, 'description': '', 'block_index': DP['default_block'], 'transfer': 0, 'quantity': 1000, 'status': 'valid', 'divisible': 1, 'source': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'locked': 0, 'tx_index': 502, 'callable': 0, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'fee_paid': 50000000}},
{'table': 'credits', 'values': {'quantity': 1000, 'address': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'asset': 'BSSET', 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'calling_function': 'issuance', 'block_index': DP['default_block']}},
{'table': 'debits', 'values': {'quantity': 50000000, 'address': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'asset': 'XCP', 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'action': 'issuance fee', 'block_index': DP['default_block']}}
]
}, {
'in': ({'fee': 10000, 'block_time': 1554090000000, 'data': b'\x00\x00\x00\x14\x00\x00\x00\xa2[\xe3Kf\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', 'block_index': DP['default_block'], 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'btc_amount': 7800, 'tx_index': 502, 'destination': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'supported': 1, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc'},),
'records': [
{'table': 'issuances', 'values': {'call_date': 0, 'call_price': 0.0, 'fee_paid': 0, 'divisible': 1, 'block_index': DP['default_block'], 'tx_index': 502, 'description': '', 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'transfer': 1, 'issuer': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'locked': 0, 'asset': 'DIVISIBLE', 'callable': 0, 'status': 'valid', 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'quantity': 0}},
{'table': 'debits', 'values': {'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'asset': 'XCP', 'action': 'issuance fee', 'block_index': DP['default_block'], 'quantity': 0}}
]
}, {
'in': ({'data': b'\x00\x00\x00\x14\x00\x00\x00\x00\xdd\x96\xd2t\x7f\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10Maximum quantity', 'block_time': 1554090000000, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'block_index': DP['default_block'], 'btc_amount': 0, 'fee': 10000, 'supported': 1, 'tx_index': 502, 'destination': '', 'tx_hash': '71da4fac29d6442ef3ff13f291860f512a888161ae9e574f313562851912aace'},),
'records': [
{'table': 'issuances', 'values': {'issuer': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'call_price': 0.0, 'transfer': 0, 'call_date': 0, 'divisible': 1, 'description': 'Maximum quantity', 'asset': 'MAXIMUM', 'tx_index': 502, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'block_index': DP['default_block'], 'quantity': 9223372036854775807, 'status': 'valid', 'callable': 0, 'tx_hash': '71da4fac29d6442ef3ff13f291860f512a888161ae9e574f313562851912aace', 'fee_paid': 50000000, 'locked': 0}},
{'table': 'credits', 'values': {'asset': 'MAXIMUM', 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'block_index': DP['default_block'], 'quantity': 9223372036854775807, 'event': '71da4fac29d6442ef3ff13f291860f512a888161ae9e574f313562851912aace', 'calling_function': 'issuance'}},
{'table': 'debits', 'values': {'asset': 'XCP', 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'block_index': DP['default_block'], 'action': 'issuance fee', 'quantity': 50000000, 'event': '71da4fac29d6442ef3ff13f291860f512a888161ae9e574f313562851912aace'}}
]
}, {
'in': ({'data': b'\x00\x00\x00\x14\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x03\xe8\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', 'tx_index': 502, 'tx_hash': '4188c1f7aaae56ce3097ef256cdbcb644dd43c84e237b4add4f24fd4848cb2c7', 'destination': '', 'fee': 10000, 'btc_amount': 0, 'block_time': 2815010000000, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'supported': 1, 'block_index': 281501, 'block_hash': '8e80b430efbe3e1b7cc13d7ec51c1e47a16b0fa23d6dd3c939fb6c4d4cfa311e1f25072500f5f9872373b54c72424b3557fccd68915d00c0afb6523702e11b6a'},),
'records': [
{'table': 'issuances', 'values': {'transfer': 0, 'tx_hash': '4188c1f7aaae56ce3097ef256cdbcb644dd43c84e237b4add4f24fd4848cb2c7', 'divisible': 1, 'status': 'valid', 'asset': 'A18446744073709551615', 'description': '', 'call_date': 0, 'tx_index': 502, 'call_price': 0.0, 'callable': 0, 'issuer': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'fee_paid': 0, 'locked': 0, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'block_index': 281501, 'quantity': 1000}},
{'table': 'credits', 'values': {'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'asset': 'A18446744073709551615', 'event': '4188c1f7aaae56ce3097ef256cdbcb644dd43c84e237b4add4f24fd4848cb2c7', 'block_index': 281501, 'quantity': 1000, 'calling_function': 'issuance'}},
{'table': 'debits', 'values': {'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'action': 'issuance fee', 'asset': 'XCP', 'event': '4188c1f7aaae56ce3097ef256cdbcb644dd43c84e237b4add4f24fd4848cb2c7', 'block_index': 281501, 'quantity': 0}}
]
}]
},
'dividend': {
'validate': [{
'in': (ADDR[0], DP['quantity'] * 1000, 'DIVISIBLE', 'XCP', DP['default_block']),
'out': (1100000000000, [{'address_quantity': 100000000, 'address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'dividend_quantity': 100000000000}, {'address_quantity': 1000000000, 'address': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'dividend_quantity': 1000000000000}], ['insufficient funds (XCP)'], 0)
}, {
'in': (ADDR[0], DP['quantity'], 'BTC', 'XCP', DP['default_block']),
'out': (None, None, ['cannot pay dividends to holders of BTC', 'no such asset, BTC.'], 0)
}, {
'in': (ADDR[0], DP['quantity'], 'XCP', 'XCP', DP['default_block']),
'out': (None, None, ['cannot pay dividends to holders of XCP', 'no such asset, XCP.'], 0)
}, {
'in': (ADDR[0], DP['quantity'], 'NOASSET', 'XCP', DP['default_block']),
'out': (None, None, ['no such asset, NOASSET.'], 0)
}, {
'in': (ADDR[1], DP['quantity'], 'DIVISIBLE', 'XCP', DP['default_block']),
'out': (99900000000, [{'dividend_quantity': 98900000000, 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'address_quantity': 98900000000}, {'dividend_quantity': 1000000000, 'address': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'address_quantity': 1000000000}], ['only issuer can pay dividends', 'insufficient funds (XCP)'], 0)
}, {
'in': (ADDR[0], DP['quantity'], 'DIVISIBLE', 'NOASSET', DP['default_block']),
'out': (None, None, ['no such dividend asset, NOASSET.'], 0)
}],
'compose': [{
'in': (ADDR[0], DP['quantity'], 'DIVISIBLE', 'XCP'),
'out': ('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x002\x00\x00\x00\x00\x05\xf5\xe1\x00\x00\x00\x00\xa2[\xe3Kf\x00\x00\x00\x00\x00\x00\x00\x01')
}, {
'in': (ADDR[0], 1, 'NODIVISIBLE', 'XCP'),
'out': ('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x002\x00\x00\x00\x00\x00\x00\x00\x01\x00\x06\xca\xd8\xdc\x7f\x0bf\x00\x00\x00\x00\x00\x00\x00\x01')
}],
'parse': [{
'in': ({'tx_hash': '450c4ced564fa52a84746ecd79d64db6f124bddee19ff2c3cd926adea673ce4c', 'supported': 1, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'data': b'\x00\x00\x002\x00\x00\x00\x00\x05\xf5\xe1\x00\x00\x00\x00\xa2[\xe3Kf\x00\x00\x00\x00\x00\x00\x00\x01', 'tx_index': 502, 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'block_index': DP['default_block'], 'btc_amount': 0, 'fee': 10000, 'destination': '', 'block_time': 1554090000000},),
'records': [
{'table': 'dividends', 'values': {'tx_hash': '450c4ced564fa52a84746ecd79d64db6f124bddee19ff2c3cd926adea673ce4c', 'fee_paid': 40000, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'tx_index': 502, 'block_index': DP['default_block'], 'dividend_asset': 'XCP', 'status': 'valid', 'quantity_per_unit': 100000000, 'asset': 'DIVISIBLE'}},
{'table': 'credits', 'values': {'calling_function': 'dividend', 'asset': 'XCP', 'block_index': DP['default_block'], 'quantity': 100000000, 'event': '450c4ced564fa52a84746ecd79d64db6f124bddee19ff2c3cd926adea673ce4c', 'address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns'}},
{'table': 'credits', 'values': {'calling_function': 'dividend', 'asset': 'XCP', 'block_index': DP['default_block'], 'quantity': 1000000000, 'event': '450c4ced564fa52a84746ecd79d64db6f124bddee19ff2c3cd926adea673ce4c', 'address': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2'}},
{'table': 'debits', 'values': {'asset': 'XCP', 'action': 'dividend', 'block_index': DP['default_block'], 'quantity': 1100000000, 'event': '450c4ced564fa52a84746ecd79d64db6f124bddee19ff2c3cd926adea673ce4c', 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc'}},
{'table': 'debits', 'values': {'asset': 'XCP', 'action': 'dividend fee', 'block_index': DP['default_block'], 'quantity': 40000, 'event': '450c4ced564fa52a84746ecd79d64db6f124bddee19ff2c3cd926adea673ce4c', 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc'}}
]
}, {
'in': ({'tx_index': 502, 'btc_amount': 0, 'block_time': 1554090000000, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'tx_hash': '5a36e9d939e70917695065b11b728f7ccbc7b828ae3baca1115885d8889e67c7', 'fee': 10000, 'block_index': DP['default_block'], 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'supported': 1, 'destination': '', 'data': b'\x00\x00\x002\x00\x00\x00\x00\x00\x00\x00\x01\x00\x06\xca\xd8\xdc\x7f\x0bf\x00\x00\x00\x00\x00\x00\x00\x01'},),
'records': [
{'table': 'dividends', 'values': {'tx_index': 502, 'asset': 'NODIVISIBLE', 'fee_paid': 40000, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'tx_hash': '5a36e9d939e70917695065b11b728f7ccbc7b828ae3baca1115885d8889e67c7', 'dividend_asset': 'XCP', 'block_index': DP['default_block'], 'quantity_per_unit': 1, 'status': 'valid'}},
{'table': 'credits', 'values': {'asset': 'XCP', 'event': '5a36e9d939e70917695065b11b728f7ccbc7b828ae3baca1115885d8889e67c7', 'calling_function': 'dividend', 'address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'quantity': 5, 'block_index': DP['default_block']}},
{'table': 'credits', 'values': {'asset': 'XCP', 'event': '5a36e9d939e70917695065b11b728f7ccbc7b828ae3baca1115885d8889e67c7', 'calling_function': 'dividend', 'address': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'quantity': 10, 'block_index': DP['default_block']}},
{'table': 'debits', 'values': {'asset': 'XCP', 'event': '5a36e9d939e70917695065b11b728f7ccbc7b828ae3baca1115885d8889e67c7', 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'quantity': 15, 'block_index': DP['default_block'], 'action': 'dividend'}},
{'table': 'debits', 'values': {'asset': 'XCP', 'event': '5a36e9d939e70917695065b11b728f7ccbc7b828ae3baca1115885d8889e67c7', 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'quantity': 40000, 'block_index': DP['default_block'], 'action': 'dividend fee'}}
]
}]
},
'order': {
'validate': [{
'in': (ADDR[0], 'DIVISIBLE', DP['quantity'], 'XCP', DP['quantity'], 2000, 0, DP['default_block']),
'out': ([])
}, {
'in': (ADDR[0], 'BTC', DP['quantity'], 'BTC', DP['quantity'], 2000, 0, DP['default_block']),
'out': (['cannot trade BTC for itself'])
},{
'in': (ADDR[0], 'DIVISIBLE', DP['quantity'] / 3, 'XCP', DP['quantity'], 2000, 0, DP['default_block']),
'out': (['give_quantity must be in satoshis'])
},{
'in': (ADDR[0], 'DIVISIBLE', DP['quantity'], 'XCP', DP['quantity'] / 3, 2000, 0, DP['default_block']),
'out': (['get_quantity must be in satoshis'])
},{
'in': (ADDR[0], 'DIVISIBLE', DP['quantity'], 'XCP', DP['quantity'], 1.5, 0, DP['default_block']),
'out': (['expiration must be expressed as an integer block delta'])
},{
'in': (ADDR[0], 'DIVISIBLE', -DP['quantity'], 'XCP', -DP['quantity'], -2000, -10000, DP['default_block']),
'out': (['non‐positive give quantity', 'non‐positive get quantity', 'negative fee_required', 'negative expiration'])
},{
'in': (ADDR[0], 'DIVISIBLE', 0, 'XCP', DP['quantity'], 2000, 0, DP['default_block']),
'out': (['non‐positive give quantity', 'zero give or zero get'])
},{
'in': (ADDR[0], 'NOASSETA', DP['quantity'], 'NOASSETB', DP['quantity'], 2000, 0, DP['default_block']),
'out': (['no such asset to give (NOASSETA)', 'no such asset to get (NOASSETB)'])
},{
'in': (ADDR[0], 'DIVISIBLE', 2**63 + 10, 'XCP', DP['quantity'], 4 * 2016 + 10, 0, DP['default_block']),
'out': (['expiration overflow', 'integer overflow'])
}],
'compose': [{
'in': (ADDR[0], 'BTC', DP['small'], 'XCP', DP['small'] * 2, DP['expiration'], 0),
'out': ('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xfa\xf0\x80\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x00')
}, {
'in': (ADDR[0], 'XCP', round(DP['small'] * 2.1), 'BTC', DP['small'], DP['expiration'], DP['fee_required']),
'out': ('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x06B,@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xfa\xf0\x80\x00\n\x00\x00\x00\x00\x00\r\xbb\xa0')
}, {
'in': (MULTISIGADDR[0], 'BTC', DP['small'], 'XCP', DP['small'] * 2, DP['expiration'], 0),
'out': ('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', [], b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xfa\xf0\x80\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x00')
}, {
'in': (MULTISIGADDR[0], 'XCP', round(DP['small'] * 2.1), 'BTC', DP['small'], DP['expiration'], DP['fee_required']),
'out': ('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', [], b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x06B,@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xfa\xf0\x80\x00\n\x00\x00\x00\x00\x00\r\xbb\xa0')
}, {
'in': (ADDR[0], 'MAXI', 2**63 - 1, 'XCP', DP['quantity'], DP['expiration'], DP['fee_required']),
'out': ('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x03:>\x7f\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00\x00\n\x00\x00\x00\x00\x00\r\xbb\xa0')
}, {
'in': (ADDR[0], 'MAXI', 2**63 + 1, 'XCP', DP['quantity'], DP['expiration'], DP['fee_required']),
'error': ('ComposeError', 'insufficient funds')
}],
'parse': [{
'in': ({'destination': None, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'source': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'block_time': 1554090000000, 'block_index': DP['default_block'], 'tx_index': 502, 'data': b'\x00\x00\x00\n\x00\x00\x00\xa2[\xe3Kf\x00\x00\x00\x00\x05\xf5\xe1\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00\x07\xd0\x00\x00\x00\x00\x00\x00\x00\x00', 'fee': 10000, 'btc_amount': None, 'supported': 1, 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8'},),
'records': [
{'table': 'orders', 'values': {'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'source': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'fee_required_remaining': 0, 'block_index': DP['default_block'], 'status': 'filled', 'get_quantity': 100000000, 'fee_provided_remaining': 10000, 'get_asset': 'XCP', 'give_remaining': 0, 'fee_provided': 10000, 'expiration': 2000, 'get_remaining': 0, 'tx_index': 502, 'give_asset': 'DIVISIBLE', 'expire_index': DP['default_block'] + 2000, 'give_quantity': 100000000, 'fee_required': 0}},
{'table': 'order_matches', 'values': {'status': 'completed', 'tx0_index': 7, 'tx0_address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'forward_quantity': 100000000, 'tx1_address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'backward_asset': 'DIVISIBLE', 'tx0_hash': '074fa38a84a81c0ed7957484ebe73836104d3068f66b189e05a7cf0b95c737f3', 'tx0_expiration': 2000, 'id': '074fa38a84a81c0ed7957484ebe73836104d3068f66b189e05a7cf0b95c737f3db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'tx1_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'tx1_expiration': 2000, 'tx1_block_index': DP['default_block'], 'forward_asset': 'XCP', 'fee_paid': 0, 'match_expire_index': DP['default_block'] + 20, 'tx0_block_index': DP['default_block'] - 495, 'backward_quantity': 100000000, 'tx1_index': 502, 'block_index': DP['default_block']}},
{'table': 'credits', 'values': {'event': '074fa38a84a81c0ed7957484ebe73836104d3068f66b189e05a7cf0b95c737f3db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'asset': 'XCP', 'block_index': DP['default_block'], 'address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'quantity': 100000000, 'calling_function': 'order match'}},
{'table': 'debits', 'values': {'address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'block_index': DP['default_block'], 'quantity': 100000000, 'asset': 'DIVISIBLE', 'action': 'open order'}},
{'table': 'credits', 'values': {'event': '074fa38a84a81c0ed7957484ebe73836104d3068f66b189e05a7cf0b95c737f3db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'asset': 'DIVISIBLE', 'block_index': DP['default_block'], 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'quantity': 100000000, 'calling_function': 'order match'}},
{'table': 'credits', 'values': {'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'asset': 'XCP', 'block_index': DP['default_block'], 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'quantity': 0, 'calling_function': 'filled'}},
{'table': 'credits', 'values': {'event': '074fa38a84a81c0ed7957484ebe73836104d3068f66b189e05a7cf0b95c737f3', 'asset': 'DIVISIBLE', 'block_index': DP['default_block'], 'address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'quantity': 0, 'calling_function': 'filled'}}
]
}, {
'in': ({'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'btc_amount': None, 'tx_index': 502, 'supported': 1, 'source': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'fee': 10000, 'block_time': 1554090000000, 'block_index': DP['default_block'], 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'data': b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0fB@\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00\x07\xd0\x00\x00\x00\x00\x00\x00\x00\x00', 'destination': None},),
'records': [
{'table': 'orders', 'values': {'give_quantity': 1000000, 'status': 'open', 'get_remaining': 0, 'tx_index': 502, 'source': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'give_remaining': 0, 'block_index': DP['default_block'], 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'fee_required': 0, 'fee_provided': 10000, 'give_asset': 'BTC', 'get_asset': 'XCP', 'fee_provided_remaining': 1000, 'expiration': 2000, 'expire_index': DP['default_block'] + 2000, 'fee_required_remaining': 0, 'get_quantity': 100000000}},
{'table': 'order_matches', 'values': {'forward_asset': 'XCP', 'id': 'b6db5c8412a58d9fa75bff41f8a7519353ffd4d359c7c8fa7ee1900bc05e4d9ddb6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'tx0_address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'tx1_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'forward_quantity': 100000000, 'match_expire_index': DP['default_block'] + 20, 'tx1_block_index': DP['default_block'], 'backward_quantity': 1000000, 'block_index': DP['default_block'], 'fee_paid': 9000, 'tx1_index': 502, 'tx1_expiration': 2000, 'tx0_hash': 'b6db5c8412a58d9fa75bff41f8a7519353ffd4d359c7c8fa7ee1900bc05e4d9d', 'tx1_address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'tx0_index': 11, 'tx0_block_index': DP['default_block'] - 491, 'backward_asset': 'BTC', 'tx0_expiration': 2000, 'status': 'pending'}}
]
}, {
'in': ({'fee': 10000, 'block_time': 1554090000000, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'source': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'destination': None, 'supported': 1, 'tx_index': 502, 'data': b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\n,+\x07\xd0\x00\x00\x00\x00\x00\x00\x00\x00', 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'btc_amount': None, 'block_index': DP['default_block']},),
'records': [
{'table': 'orders', 'values': {'fee_required_remaining': 0, 'expire_index': DP['default_block'] + 2000, 'status': 'open', 'expiration': 2000, 'source': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'get_remaining': 0, 'give_remaining': 0, 'get_quantity': 666667, 'fee_required': 0, 'fee_provided_remaining': 10000, 'fee_provided': 10000, 'block_index': DP['default_block'], 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'give_quantity': 100000000, 'tx_index': 502, 'give_asset': 'XCP', 'get_asset': 'BTC'}},
{'table': 'order_matches', 'values': {'forward_asset': 'BTC', 'id': '8a63e7a516d36c17ac32999222ac282ab94fb9c5ea30637cd06660b3139510f6db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'tx0_address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'tx1_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'forward_quantity': 666667, 'match_expire_index': DP['default_block'] + 20, 'tx1_block_index': DP['default_block'], 'backward_quantity': 100000000, 'block_index': DP['default_block'], 'fee_paid': 0, 'tx1_index': 502, 'tx1_expiration': 2000, 'tx0_hash': '8a63e7a516d36c17ac32999222ac282ab94fb9c5ea30637cd06660b3139510f6', 'tx1_address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'tx0_index': 12, 'tx0_block_index': DP['default_block'] - 490, 'backward_asset': 'XCP', 'tx0_expiration': 2000, 'status': 'pending'}},
{'table': 'debits', 'values': {'action': 'open order', 'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'asset': 'XCP', 'block_index': DP['default_block'], 'address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'quantity': 100000000}}
]
}, {
'in': ({'block_time': 1554090000000, 'destination': None, 'btc_amount': None, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'data': b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1e\x84\x80\x07\xd0\x00\x00\x00\x00\x00\x00\x00\x00', 'supported': 1, 'fee': 10000, 'source': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'tx_index': 502, 'block_index': DP['default_block'], 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8'},),
'records': [
{'table': 'orders', 'values': {'fee_provided_remaining': 10000, 'get_asset': 'BTC', 'give_remaining': 100000000, 'get_remaining': 2000000, 'get_quantity': 2000000, 'give_asset': 'XCP', 'block_index': DP['default_block'], 'source': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'expiration': 2000, 'expire_index': DP['default_block'] + 2000, 'tx_index': 502, 'status': 'open', 'give_quantity': 100000000, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'fee_required_remaining': 0, 'fee_provided': 10000, 'fee_required': 0}},
{'table': 'debits', 'values': {'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'address': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'quantity': 100000000, 'action': 'open order', 'asset': 'XCP', 'block_index': DP['default_block']}}
]
}, {
'in': ({'data': b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xa1 \x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00\x07\xd0\x00\x00\x00\x00\x00\x00\x00\x00', 'tx_index': 502, 'destination': None, 'block_index': DP['default_block'], 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'btc_amount': None, 'block_time': 1554090000000, 'supported': 1, 'fee': 1000000, 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'source': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns'},),
'records': [
{'table': 'orders', 'values': {'block_index': DP['default_block'], 'fee_required_remaining': 0, 'source': 'mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', 'give_remaining': 500000, 'expiration': 2000, 'give_quantity': 500000, 'get_asset': 'XCP', 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'fee_provided_remaining': 1000000, 'tx_index': 502, 'fee_required': 0, 'give_asset': 'BTC', 'expire_index': DP['default_block'] + 2000, 'get_remaining': 100000000, 'fee_provided': 1000000, 'get_quantity': 100000000, 'status': 'open'}}
]
}, {
'in': ({'btc_amount': None, 'block_time': 1554090000000, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'tx_index': 502, 'supported': 1, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'destination': None, 'block_index': DP['default_block'], 'data': b'\x00\x00\x00\n\x00\x06\xca\xd8\xdc\x7f\x0bf\x00\x00\x00\x00\x00\x00\x01\xf4\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00\x07\xd0\x00\x00\x00\x00\x00\x00\x00\x00', 'fee': 10000},),
'records': [
{'table': 'orders', 'values': {'fee_required_remaining': 0, 'fee_provided_remaining': 10000, 'block_index': DP['default_block'], 'give_remaining': 500, 'status': 'open', 'fee_required': 0, 'fee_provided': 10000, 'expiration': 2000, 'give_quantity': 500, 'get_asset': 'XCP', 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'tx_index': 502, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'get_remaining': 100000000, 'get_quantity': 100000000, 'give_asset': 'NODIVISIBLE', 'expire_index': DP['default_block'] + 2000}},
{'table': 'debits', 'values': {'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'block_index': DP['default_block'], 'quantity': 500, 'action': 'open order', 'asset': 'NODIVISIBLE'}}
]
}, {
'in': ({'block_index': DP['default_block'], 'data': b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xfa\xf0\x80\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x00', 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'destination': '', 'fee': 10000, 'tx_index': 502, 'supported': 1, 'source': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'block_time': 1554090000000, 'btc_amount': 0},),
'records': [
{'table': 'orders', 'values': {'block_index': DP['default_block'], 'expiration': 10, 'expire_index': DP['default_block'] + 10, 'fee_required_remaining': 0, 'source': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'fee_provided': 10000, 'status': 'open', 'give_asset': 'BTC', 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'get_remaining': 0, 'give_remaining': 49000000, 'tx_index': 502, 'get_asset': 'XCP', 'fee_provided_remaining': 1000, 'fee_required': 0, 'give_quantity': 50000000, 'get_quantity': 100000000}},
{'table': 'order_matches', 'values': {'backward_quantity': 1000000, 'tx0_hash': 'b6db5c8412a58d9fa75bff41f8a7519353ffd4d359c7c8fa7ee1900bc05e4d9d', 'tx1_block_index': DP['default_block'], 'match_expire_index': DP['default_block'] + 20, 'tx0_address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'tx0_block_index': DP['default_block'] - 491, 'status': 'pending', 'block_index': DP['default_block'], 'tx1_address': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'forward_quantity': 100000000, 'tx1_index': 502, 'fee_paid': 9000, 'id': 'b6db5c8412a58d9fa75bff41f8a7519353ffd4d359c7c8fa7ee1900bc05e4d9ddb6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'forward_asset': 'XCP', 'tx0_expiration': 2000, 'tx1_expiration': 10, 'backward_asset': 'BTC', 'tx0_index': 11, 'tx1_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d'}}
]
}, {
'in': ({'fee': 10000, 'btc_amount': 0, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'tx_index': 502, 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'source': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'supported': 1, 'block_time': 1554090000000, 'block_index': DP['default_block'], 'data': b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x06B,@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xfa\xf0\x80\x00\n\x00\x00\x00\x00\x00\r\xbb\xa0', 'destination': ''},),
'records': [
{'table': 'orders', 'values': {'get_asset': 'BTC', 'give_asset': 'XCP', 'fee_required': 900000, 'block_index': DP['default_block'], 'expire_index': DP['default_block'] + 10, 'give_remaining': 105000000, 'fee_provided_remaining': 10000, 'tx_hash': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'tx_index': 502, 'expiration': 10, 'status': 'open', 'source': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'give_quantity': 105000000, 'get_quantity': 50000000, 'fee_provided': 10000, 'fee_required_remaining': 900000, 'get_remaining': 50000000}},
{'table': 'debits', 'values': {'event': 'db6d9052b576d973196363e11163d492f50926c2f1d1efd67b3d999817b0d04d', 'asset': 'XCP', 'quantity': 105000000, 'address': '1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', 'block_index': DP['default_block'], 'action': 'open order'}}
]
}, {
'in': ({'btc_amount': 0, 'fee': 10000, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'destination': '', 'tx_hash': '0ec7da68a67e165693afd6c97566f8f509d302bceec8d1be0100335718a40fe5', 'tx_index': 502, 'data': b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x03:>\x7f\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00\x00\n\x00\x00\x00\x00\x00\r\xbb\xa0', 'block_hash': '2d62095b10a709084b1854b262de77cb9f4f7cd76ba569657df8803990ffbfc6c12bca3c18a44edae9498e1f0f054072e16eef32dfa5e3dd4be149009115b4b8', 'supported': 1, 'block_time': 1554090000000, 'block_index': DP['default_block']},),
'records': [
{'table': 'orders', 'values': {'fee_provided_remaining': 10000, 'source': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'block_index': DP['default_block'], 'tx_index': 502, 'fee_required': 900000, 'give_asset': 'MAXI', 'status': 'open', 'get_remaining': 100000000, 'give_quantity': 9223372036854775807, 'give_remaining': 9223372036854775807, 'expiration': 10, 'get_asset': 'XCP', 'tx_hash': '0ec7da68a67e165693afd6c97566f8f509d302bceec8d1be0100335718a40fe5', 'expire_index': DP['default_block'] + 10, 'fee_provided': 10000, 'get_quantity': 100000000, 'fee_required_remaining': 900000}},
{'table': 'debits', 'values': {'quantity': 9223372036854775807, 'asset': 'MAXI', 'action': 'open order', 'address': 'mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', 'event': '0ec7da68a67e165693afd6c97566f8f509d302bceec8d1be0100335718a40fe5', 'block_index': DP['default_block']}}
]
}]
},
'bitcoin': {
'transaction': [{
'comment': 'burn',
'in': (('mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', [('mvCounterpartyXXXXXXXXXXXXXXW24Hef', 62000000)], None), {'encoding': 'multisig'}),
'out': '0100000001ebe3111881a8733ace02271dcf606b7450c41a48c1cb21fd73f4ba787b353ce4000000001976a9148d6ae8a3b381663118b4e1eff4cfc7d0954dd6ec88acffffffff02800bb203000000001976a914a11b66a67b3ff69671c8f82254099faf374b800e88ac70ae4302000000001976a9148d6ae8a3b381663118b4e1eff4cfc7d0954dd6ec88ac00000000'
}, {
'comment': 'multisig burn',
'in': (('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', [('mvCounterpartyXXXXXXXXXXXXXXW24Hef', 50000000)], None), {'encoding': 'multisig'}),
'out': '0100000001051511b66ba309e3dbff1fde22aefaff4190675235a010a5c6acb1e43da8005f000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752aeffffffff0280f0fa02000000001976a914a11b66a67b3ff69671c8f82254099faf374b800e88ac70c9fa02000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae00000000'
}, {
'comment': 'send',
'in': (('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [('mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', None)], b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x02\xfa\xf0\x80'), {'encoding': 'multisig'}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff03781e0000000000001976a9148d6ae8a3b381663118b4e1eff4cfc7d0954dd6ec88ac781e0000000000006951210262415bf04af834423d3dd7ada4dc727a030865759f9fba5aee78c9ea71e58798210254da540fb2663b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe72447210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae14fbe90b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'comment': 'send dest multisig',
'in': (('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', None)], b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00'), {'encoding': 'multisig'}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff03781e0000000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae781e0000000000006951210362415bf04af834423d3dd7ada4dc727a030865759f9fba5aee7fc6fbf1e5875a210254da540fb2663b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe72447210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae14fbe90b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'comment': 'send source multisig',
'in': (('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', [('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', None)], b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00'), {'encoding': 'multisig'}),
'out': '0100000001051511b66ba309e3dbff1fde22aefaff4190675235a010a5c6acb1e43da8005f000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752aeffffffff03781e0000000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac781e0000000000006951210334caf7ca87f0fd78a01d9a0d68221e55beef3722da8be72d254dd351c26108892102bc14528340c27d005aa9e2913fd8c032ffa94625307a450077125d580099b57d210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae007df505000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae00000000'
}, {
'comment': 'send source and dest multisig',
'in': (('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', [('1_mnfAHmddVibnZNSkh8DvKaQoiEfNsxjXzH_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', None)], b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00'), {'encoding': 'multisig'}),
'out': '0100000001051511b66ba309e3dbff1fde22aefaff4190675235a010a5c6acb1e43da8005f000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752aeffffffff03781e0000000000004751210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b977210378ee11c3fb97054877a809ce083db292b16d971bcdc6aa4c8f92087133729d8b52ae781e0000000000006951210334caf7ca87f0fd78a01d9a0d68221e55beef3722da8be72d254dd351c26108892102bc14528340c27d005aa9e2913fd8c032ffa94625307a450077125d580099b57d210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae007df505000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae00000000'
}, {
'comment': 'maximum quantity send',
'in': (('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [('mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', None)], b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03:>\x7f\xff\xff\xff\xff\xff\xff\xff'), {'encoding': 'multisig'}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff03781e0000000000001976a9148d6ae8a3b381663118b4e1eff4cfc7d0954dd6ec88ac781e0000000000006951210362415bf04af834423d3dd7ada4dc727a0308664fa0e045a51185cce50ee58717210254da540fb2663b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe72447210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae14fbe90b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'comment': 'issuance',
'in': (('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x00\x14\x00\x00\x00\x00\x00\x0b\xfc\xe3\x00\x00\x00\x00\x00\x00\x03\xe8\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'), {'encoding': 'multisig'}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff02781e0000000000006951210359415bf04af834423d3dd7adb0dc727a03086e897d9fba5aee7a331919e4871d210254da540fb2663b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe72447210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae8c19ea0b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'comment': 'issuance',
'in': (('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [('mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns', None)], b'\x00\x00\x00\x14\x00\x00\x00\xa2[\xe3Kf\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'), {'encoding': 'multisig'}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff03781e0000000000001976a9148d6ae8a3b381663118b4e1eff4cfc7d0954dd6ec88ac781e0000000000006951210259415bf04af834423d3dd7adb0dc727aa153863ef89fba5aee7a331af1e4873a210254da540fb2663b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe72447210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae14fbe90b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'comment': 'multisig issuance',
'in': (('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', [], b'\x00\x00\x00\x14\x00\x00\x00\x00\x00\x0b\xfc\xe3\x00\x00\x00\x00\x00\x00\x03\xe8\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'), {'encoding': 'multisig'}),
'out': '0100000001051511b66ba309e3dbff1fde22aefaff4190675235a010a5c6acb1e43da8005f000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752aeffffffff02781e000000000000695121030fcaf7ca87f0fd78a01d9a0d7c221e55beef3cde388be72d254826b32a6008cb2102bc14528340c27d005aa9e2913fd8c032ffa94625307a450077125d580099b57d210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae789bf505000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae00000000'
}, {
'comment': 'maximum quantity issuance',
'in': (('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x00\x14\x00\x00\x00\x00\xdd\x96\xd2t\x7f\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10Maximum quantity'), {'encoding': 'multisig'}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff02781e0000000000006951210249415bf04af834423d3dd7adb0dc727a03d5f3a7eae045a51185cce50ee4877e210354da540fb2663b75f68ead197067a5af636736dbdcf8840c45d94079bbe724cb210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae8c19ea0b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'comment': 'transfer asset to multisig',
'in': (('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', None)], b'\x00\x00\x00\x14\x00\x00\x00\xa2[\xe3Kf\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'), {'encoding': 'multisig'}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff03781e0000000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae781e0000000000006951210259415bf04af834423d3dd7adb0dc727aa153863ef89fba5aee7a331af1e4873a210254da540fb2663b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe72447210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae14fbe90b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'comment': 'order',
'in': (('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xfa\xf0\x80\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x00'), {'encoding': 'multisig','fee_provided': DP['fee_provided']}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff02781e0000000000006951210348415bf04af834423d3dd7adaedc727a030865759e9fba5aee78c9ea71e5870f210354da540fb2673b75e6c3c994f80ad0c8431643bab28ced783cd94079bbe72445210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae5cfeda0b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'comment': 'multisig order',
'in': (('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', [], b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xfa\xf0\x80\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x00'), {'encoding': 'multisig','fee_provided': DP['fee_provided']}),
'out': '0100000001051511b66ba309e3dbff1fde22aefaff4190675235a010a5c6acb1e43da8005f000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752aeffffffff02781e000000000000695121021ecaf7ca87f0fd78a01d9a0d62221e55beef3722db8be72d254adc40426108d02103bc14528340c37d005aa9e764ded8c038ffa94625307a450077125d580099b53b210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae4880e605000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae00000000'
}, {
'comment': 'multisig order',
'in': (('1_mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc_mtQheFaSfWELRB2MyMBaiWjdDm6ux9Ezns_2', [], b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x06B,@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xfa\xf0\x80\x00\n\x00\x00\x00\x00\x00\r\xbb\xa0'), {'encoding': 'multisig'}),
'out': '0100000001051511b66ba309e3dbff1fde22aefaff4190675235a010a5c6acb1e43da8005f000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752aeffffffff02781e000000000000695121031ecaf7ca87f0fd78a01d9a0d62221e55beef3722da8be72d254e649c8261083d2102bc14528340c27d005aa9e06bcf58c038ffa946253077fea077125d580099b5bb210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae789bf505000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae00000000'
}, {
'comment': 'maximum quantity order',
'in': (('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x00\n\x00\x00\x00\x00\x00\x03:>\x7f\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x05\xf5\xe1\x00\x00\n\x00\x00\x00\x00\x00\r\xbb\xa0'), {'encoding': 'multisig'}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff02781e0000000000006951210248415bf04af834423d3dd7adaedc727a0308664fa0e045a51185cce50ee58759210354da540fb2673b75e6c3c994f80ad0c8431643bab28156d83cd94079bbe72452210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae8c19ea0b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'comment': 'dividend',
'in': (('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x002\x00\x00\x00\x00\x05\xf5\xe1\x00\x00\x00\x00\xa2[\xe3Kf\x00\x00\x00\x00\x00\x00\x00\x01'), {'encoding': 'multisig'}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff02781e000000000000695121035a415bf04af834423d3dd7ad96dc727a030d90949e9fba5a4c21d05197e58735210254da540fb2673b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe7246f210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae8c19ea0b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'comment': 'dividend',
'in': (('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x002\x00\x00\x00\x00\x00\x00\x00\x01\x00\x06\xca\xd8\xdc\x7f\x0bf\x00\x00\x00\x00\x00\x00\x00\x01'), {'encoding': 'multisig'}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff02781e000000000000695121025a415bf04af834423d3dd7ad96dc727a030865759f9fbc9036a64c1197e587c8210254da540fb2673b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe7246f210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae8c19ea0b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'comment': 'free issuance',
'in': (('mn6q3dS2EnDUx3bmyWc6D4szJNVGtaR7zc', [], b'\x00\x00\x00\x14\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x03\xe8\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'), {'encoding': 'multisig'}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff02781e0000000000006951210259415bf04af834423d3dd7adb0238d85fcf79a8a619fba5aee7a331919e487e8210254da540fb2663b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe72447210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae8c19ea0b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}],
},
'util': {
'base58_check_decode': [{
'in': ('16UwLL9Risc3QfPqBUvKofHmBQ7wMtjvM', b'\x00'),
'out': '010966776006953d5567439e5e39f86a0d273bee'
}, {
'in': (ADDR[0], b'\x6f'),
'out': '4838d8b3588c4c7ba7c1d06f866e9b3739c63037'
}],
'api': [{
'in': ('create_burn', {'source': ADDR[1], 'quantity': DP['burn_quantity']}),
'out': '0100000001ebe3111881a8733ace02271dcf606b7450c41a48c1cb21fd73f4ba787b353ce4000000001976a9148d6ae8a3b381663118b4e1eff4cfc7d0954dd6ec88acffffffff02800bb203000000001976a914a11b66a67b3ff69671c8f82254099faf374b800e88ac70ae4302000000001976a9148d6ae8a3b381663118b4e1eff4cfc7d0954dd6ec88ac00000000'
}, {
'in': ('create_send', {'source': ADDR[0], 'destination': ADDR[1], 'asset': 'XCP', 'quantity': DP['small']}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff03781e0000000000001976a9148d6ae8a3b381663118b4e1eff4cfc7d0954dd6ec88ac781e0000000000006951210262415bf04af834423d3dd7ada4dc727a030865759f9fba5aee78c9ea71e58798210254da540fb2663b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe72447210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae14fbe90b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'in': ('create_issuance', {'source': ADDR[0], 'transfer_destination': None, 'asset': 'BSSET', 'quantity': 1000, 'divisible': True, 'callable': False, 'call_date': None, 'call_price': None, 'description': ''}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff02781e0000000000006951210359415bf04af834423d3dd7adb0dc727a03086e897d9fba5aee7a331919e4871d210254da540fb2663b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe72447210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae8c19ea0b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'in': ('create_issuance', {'source': ADDR[0], 'transfer_destination': ADDR[1], 'asset': 'DIVISIBLE', 'quantity': 0, 'divisible': True, 'callable': False, 'call_date': None, 'call_price': None, 'description': ''}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff03781e0000000000001976a9148d6ae8a3b381663118b4e1eff4cfc7d0954dd6ec88ac781e0000000000006951210259415bf04af834423d3dd7adb0dc727aa153863ef89fba5aee7a331af1e4873a210254da540fb2663b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe72447210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae14fbe90b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'in': ('create_order', {'source': ADDR[0], 'give_asset': 'BTC', 'give_quantity': DP['small'], 'get_asset': 'XCP', 'get_quantity': DP['small'] * 2, 'expiration': DP['expiration'], 'fee_required': 0, 'fee_provided': DP['fee_provided']}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff02781e0000000000006951210348415bf04af834423d3dd7adaedc727a030865759e9fba5aee78c9ea71e5870f210354da540fb2673b75e6c3c994f80ad0c8431643bab28ced783cd94079bbe72445210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae5cfeda0b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'in': ('create_order', {'source': ADDR[0], 'give_asset': 'XCP', 'give_quantity': round(DP['small'] * 2.1), 'get_asset': 'BTC', 'get_quantity': DP['small'], 'expiration': DP['expiration'], 'fee_required': DP['fee_required']}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff02781e0000000000006951210248415bf04af834423d3dd7adaedc727a030865759f9fba5aee7c7136b1e58715210354da540fb2663b75e6c3ce9be98ad0c8431643bab28156d83cd94079bbe72460210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae8c19ea0b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'in': ('create_burn', {'source': MULTISIGADDR[0], 'quantity': int(DP['quantity'] / 2)}),
'out': '0100000001051511b66ba309e3dbff1fde22aefaff4190675235a010a5c6acb1e43da8005f000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752aeffffffff0280f0fa02000000001976a914a11b66a67b3ff69671c8f82254099faf374b800e88ac70c9fa02000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae00000000'
}, {
'in': ('create_send', {'source': ADDR[0], 'destination': MULTISIGADDR[0], 'asset': 'XCP', 'quantity': DP['quantity']}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff03781e0000000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae781e0000000000006951210362415bf04af834423d3dd7ada4dc727a030865759f9fba5aee7fc6fbf1e5875a210254da540fb2663b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe72447210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae14fbe90b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'in': ('create_send', {'source': MULTISIGADDR[0], 'destination': ADDR[0], 'asset': 'XCP', 'quantity': DP['quantity']}),
'out': '0100000001051511b66ba309e3dbff1fde22aefaff4190675235a010a5c6acb1e43da8005f000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752aeffffffff03781e0000000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac781e0000000000006951210334caf7ca87f0fd78a01d9a0d68221e55beef3722da8be72d254dd351c26108892102bc14528340c27d005aa9e2913fd8c032ffa94625307a450077125d580099b57d210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae007df505000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae00000000'
}, {
'in': ('create_send', {'source': MULTISIGADDR[0], 'destination': MULTISIGADDR[1], 'asset': 'XCP', 'quantity': DP['quantity']}),
'out': '0100000001051511b66ba309e3dbff1fde22aefaff4190675235a010a5c6acb1e43da8005f000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752aeffffffff03781e0000000000004751210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b977210378ee11c3fb97054877a809ce083db292b16d971bcdc6aa4c8f92087133729d8b52ae781e0000000000006951210334caf7ca87f0fd78a01d9a0d68221e55beef3722da8be72d254dd351c26108892102bc14528340c27d005aa9e2913fd8c032ffa94625307a450077125d580099b57d210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae007df505000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae00000000'
}, {
'in': ('create_issuance', {'source': MULTISIGADDR[0], 'transfer_destination': None, 'asset': 'BSSET', 'quantity': 1000, 'divisible': True, 'callable': False, 'call_date': None, 'call_price': None, 'description': ''}),
'out': '0100000001051511b66ba309e3dbff1fde22aefaff4190675235a010a5c6acb1e43da8005f000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752aeffffffff02781e000000000000695121030fcaf7ca87f0fd78a01d9a0d7c221e55beef3cde388be72d254826b32a6008cb2102bc14528340c27d005aa9e2913fd8c032ffa94625307a450077125d580099b57d210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae789bf505000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae00000000'
}, {
'in': ('create_issuance', {'source': ADDR[0], 'transfer_destination': MULTISIGADDR[0], 'asset': 'DIVISIBLE', 'quantity': 0, 'divisible': True, 'callable': False, 'call_date': None, 'call_price': None, 'description': ''}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff03781e0000000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae781e0000000000006951210259415bf04af834423d3dd7adb0dc727aa153863ef89fba5aee7a331af1e4873a210254da540fb2663b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe72447210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae14fbe90b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'in': ('create_issuance', {'source': ADDR[0], 'asset': 'A{}'.format(2**64 - 1), 'quantity': 1000}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff02781e0000000000006951210259415bf04af834423d3dd7adb0238d85fcf79a8a619fba5aee7a331919e487e8210254da540fb2663b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe72447210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae8c19ea0b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'in': ('create_order', {'source': MULTISIGADDR[0], 'give_asset': 'BTC', 'give_quantity': DP['small'], 'get_asset': 'XCP', 'get_quantity': DP['small'] * 2, 'expiration': DP['expiration'], 'fee_required': 0, 'fee_provided': DP['fee_provided']}),
'out': '0100000001051511b66ba309e3dbff1fde22aefaff4190675235a010a5c6acb1e43da8005f000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752aeffffffff02781e000000000000695121021ecaf7ca87f0fd78a01d9a0d62221e55beef3722db8be72d254adc40426108d02103bc14528340c37d005aa9e764ded8c038ffa94625307a450077125d580099b53b210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae4880e605000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae00000000'
}, {
'in': ('create_order', {'source': MULTISIGADDR[0], 'give_asset': 'XCP', 'give_quantity': round(DP['small'] * 2.1), 'get_asset': 'BTC', 'get_quantity': DP['small'], 'expiration': DP['expiration'], 'fee_required': DP['fee_required']}),
'out': '0100000001051511b66ba309e3dbff1fde22aefaff4190675235a010a5c6acb1e43da8005f000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752aeffffffff02781e000000000000695121031ecaf7ca87f0fd78a01d9a0d62221e55beef3722da8be72d254e649c8261083d2102bc14528340c27d005aa9e06bcf58c038ffa946253077fea077125d580099b5bb210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae789bf505000000004751210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b0210319f6e07b0b8d756156394b9dcf3b011fe9ac19f2700bd6b69a6a1783dbb8b97752ae00000000'
}, {
'in': ('create_dividend', {'source': ADDR[0], 'quantity_per_unit': DP['quantity'], 'asset': 'DIVISIBLE', 'dividend_asset': 'XCP'}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff02781e000000000000695121035a415bf04af834423d3dd7ad96dc727a030d90949e9fba5a4c21d05197e58735210254da540fb2673b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe7246f210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae8c19ea0b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}, {
'in': ('create_dividend', {'source': ADDR[0], 'quantity_per_unit': 1, 'asset': 'NODIVISIBLE', 'dividend_asset': 'XCP'}),
'out': '0100000001c1d8c075936c3495f6d653c50f73d987f75448d97a750249b1eb83bee71b24ae000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788acffffffff02781e000000000000695121025a415bf04af834423d3dd7ad96dc727a030865759f9fbc9036a64c1197e587c8210254da540fb2673b75e6c3cc61190ad0c2431643bab28ced783cd94079bbe7246f210282b886c087eb37dc8182f14ba6cc3e9485ed618b95804d44aecc17c300b585b053ae8c19ea0b000000001976a9144838d8b3588c4c7ba7c1d06f866e9b3739c6303788ac00000000'
}],
'get_asset_id': [{
'in': ('BCD', 308000),
'error': ('AssetNameError', 'too short')
}, {
'in': ('ABCD', 308000),
'error': ('AssetNameError', 'non‐numeric asset name starts with ‘A’')
}, {
'in': ('A{}'.format(26**12), 308000),
'error': ('AssetNameError', 'numeric asset name not in range')
}, {
'in': ('A{}'.format(2**64), 308000),
'error': ('AssetNameError', 'numeric asset name not in range')
}, {
'in': ('A{}'.format(26**12 + 1), 308000),
'out': 26**12 + 1
}, {
'in': ('A{}'.format(2**64 - 1), 308000),
'out': 2**64 - 1
}, {
'in': ('LONGASSETNAMES', 308000),
'error': ('AssetNameError', 'long asset names must be numeric')
}, {
'in': ('BCDE_F', 308000),
'error': ('AssetNameError', "('invalid character:', '_')")
}, {
'in': ('BAAA', 308000),
'out': 26**3
}, {
'in': ('ZZZZZZZZZZZZ', 308000),
'out': 26**12 - 1
}],
'get_asset_name': [{
'in': (26**12 - 1, 308000),
'out': 'ZZZZZZZZZZZZ'
}, {
'in': (26**3, 308000),
'out': 'BAAA'
}, {
'in': (2**64 - 1, 308000),
'out': 'A{}'.format(2**64 - 1)
}, {
'in': (26**12 + 1, 308000),
'out': 'A{}'.format(26**12 + 1)
}, {
'in': (26**3 - 1, 308000),
'error': ('AssetIDError', 'too low')
}, {
'in': (2**64, 308000),
'error': ('AssetIDError', 'too high')
}]
}
}
| 147.894819
| 897
| 0.727149
| 7,348
| 94,209
| 9.195155
| 0.046407
| 0.098481
| 0.118284
| 0.120948
| 0.57082
| 0.499038
| 0.407661
| 0.327724
| 0.251073
| 0.231995
| 0
| 0.335347
| 0.123406
| 94,209
| 636
| 898
| 148.127358
| 0.482714
| 0
| 0
| 0.415748
| 0
| 0.094488
| 0.696409
| 0.489985
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.001575
| 0
| 0.001575
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f8fabf4be397cdfb5992de02121d2a3a34f0c227
| 78
|
py
|
Python
|
UDP-TEXT-TRANSFER-CLIENT/rsatest.py
|
Aaronlikesmath/UDP-TEXT-TRANSFER
|
8f8fe038dab869d1fb174eb78b0d678bb8967890
|
[
"MIT"
] | null | null | null |
UDP-TEXT-TRANSFER-CLIENT/rsatest.py
|
Aaronlikesmath/UDP-TEXT-TRANSFER
|
8f8fe038dab869d1fb174eb78b0d678bb8967890
|
[
"MIT"
] | null | null | null |
UDP-TEXT-TRANSFER-CLIENT/rsatest.py
|
Aaronlikesmath/UDP-TEXT-TRANSFER
|
8f8fe038dab869d1fb174eb78b0d678bb8967890
|
[
"MIT"
] | null | null | null |
import rsa
public_key, private_key = rsa.newkeys(2048)
print (str(public_key))
| 26
| 43
| 0.794872
| 13
| 78
| 4.538462
| 0.692308
| 0.305085
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056338
| 0.089744
| 78
| 3
| 44
| 26
| 0.774648
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5d0173bb20c2cc2fcb1f53b141a85ed7ef996efc
| 220
|
py
|
Python
|
apps/commands/admin.py
|
Nuriddin-Olimjon/resume_generator
|
e114f1d1861191e5da6a6bf4c31e7b7d6304a52a
|
[
"MIT"
] | null | null | null |
apps/commands/admin.py
|
Nuriddin-Olimjon/resume_generator
|
e114f1d1861191e5da6a6bf4c31e7b7d6304a52a
|
[
"MIT"
] | null | null | null |
apps/commands/admin.py
|
Nuriddin-Olimjon/resume_generator
|
e114f1d1861191e5da6a6bf4c31e7b7d6304a52a
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Command, CommandType, CommandTypeText
# Register your models here.
admin.site.register(Command)
admin.site.register(CommandType)
admin.site.register(CommandTypeText)
| 27.5
| 57
| 0.831818
| 27
| 220
| 6.777778
| 0.481481
| 0.147541
| 0.278689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086364
| 220
| 7
| 58
| 31.428571
| 0.910448
| 0.118182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5d2ab9e156d6abddabc3403c308e2e473a17e063
| 176
|
py
|
Python
|
venv/Lib/site-packages/qtpy/tests/test_qttest.py
|
BoxicaLion/BasicMathFormulas
|
4d9782f2c0c75ecccf4c0ea995f324f93e4fb6e2
|
[
"MIT"
] | 1,520
|
2015-01-06T15:55:15.000Z
|
2022-03-31T21:50:23.000Z
|
qtpy/tests/test_qttest.py
|
phil65/qtpy
|
a228a667829be5822cf810fb06b53bf03a2b7b39
|
[
"MIT"
] | 546
|
2015-01-02T07:59:42.000Z
|
2022-03-31T12:51:29.000Z
|
qtpy/tests/test_qttest.py
|
phil65/qtpy
|
a228a667829be5822cf810fb06b53bf03a2b7b39
|
[
"MIT"
] | 328
|
2015-01-06T15:55:21.000Z
|
2022-03-28T22:07:25.000Z
|
from __future__ import absolute_import
import pytest
from qtpy import QtTest
def test_qttest():
"""Test the qtpy.QtTest namespace"""
assert QtTest.QTest is not None
| 17.6
| 40
| 0.755682
| 25
| 176
| 5.08
| 0.64
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 176
| 9
| 41
| 19.555556
| 0.881944
| 0.170455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| true
| 0
| 0.6
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.