hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
30274614d00e6758624994fb0fc84ec824bdc052
| 2,443
|
py
|
Python
|
Eager/tests/test_policy_lang.py
|
UCSB-CS-RACELab/eager-appscale
|
d58fe64bb867ef58af19c1d84a5e1ec68ecddd3d
|
[
"Apache-2.0"
] | 3
|
2016-06-12T01:18:49.000Z
|
2018-07-16T18:20:23.000Z
|
Eager/tests/test_policy_lang.py
|
UCSB-CS-RACELab/eager-appscale
|
d58fe64bb867ef58af19c1d84a5e1ec68ecddd3d
|
[
"Apache-2.0"
] | null | null | null |
Eager/tests/test_policy_lang.py
|
UCSB-CS-RACELab/eager-appscale
|
d58fe64bb867ef58af19c1d84a5e1ec68ecddd3d
|
[
"Apache-2.0"
] | 1
|
2020-05-25T02:59:15.000Z
|
2020-05-25T02:59:15.000Z
|
from policy.policy_language import validate_policy
try:
from unittest import TestCase
except ImportError:
from unittest.case import TestCase
class TestPolicyLanguage(TestCase):
def test_parser_1(self):
source = """assert_app_dependency(api, 'Foo', '1.0')"""
try:
validate_policy(source)
except Exception as ex:
self.fail("Unexpected error")
def test_parser_2(self):
source = """assert_app_dependency(api, 'Foo', '1.0')
open('foo.txt','r')"""
try:
validate_policy(source)
self.fail("Invalid function did not throw exception")
except Exception as ex:
pass
def test_parser_3(self):
source = """class Foo:
pass"""
try:
validate_policy(source)
self.fail("Class definition did not throw exception")
except Exception as ex:
pass
def test_parser_4(self):
source = """def foo():
pass
foo()"""
try:
validate_policy(source)
except Exception as ex:
print ex
self.fail("Unexpected error")
def test_parser_5(self):
source = """def foo(val):
pass
foo(bar())"""
try:
validate_policy(source)
self.fail("Invalid function did not throw exception")
except Exception as ex:
pass
def test_parser_6(self):
source = """if api.owner == 'alice':
assert_not_app_dependency(api, 'Foo', '1.0')"""
try:
validate_policy(source)
except Exception as ex:
self.fail("Unexpected error")
def test_parser_7(self):
source = """from re import compile
compile('^foo')"""
try:
validate_policy(source)
except Exception as ex:
print ex
self.fail("Unexpected error")
def test_parser_8(self):
source = """import re
re.compile('^foo')"""
try:
validate_policy(source)
except Exception as ex:
print ex
self.fail("Unexpected error")
def test_parser_9(self):
source = """import os"""
try:
validate_policy(source)
self.fail("Invalid module did not throw exception")
except Exception as ex:
pass
def test_parser_10(self):
source = """from os import *"""
try:
validate_policy(source)
self.fail("Invalid module did not throw exception")
except Exception as ex:
pass
def test_parser_11(self):
source = """import __builtin__"""
try:
validate_policy(source)
self.fail("Invalid module did not throw exception")
except Exception as ex:
pass
| 23.490385
| 59
| 0.64388
| 317
| 2,443
| 4.817035
| 0.195584
| 0.11002
| 0.093648
| 0.165684
| 0.724951
| 0.724951
| 0.70465
| 0.70465
| 0.70465
| 0.669941
| 0
| 0.010243
| 0.240688
| 2,443
| 104
| 60
| 23.490385
| 0.812938
| 0
| 0
| 0.611111
| 0
| 0
| 0.271686
| 0.033552
| 0
| 0
| 0
| 0
| 0.033333
| 0
| null | null | 0.1
| 0.1
| null | null | 0.033333
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
306c759827f76456b7fa1a6983831d3c3bc45c56
| 66,192
|
py
|
Python
|
tests/test_multithreading.py
|
VlachosGroup/AIMSim
|
21cac88f8338f2683737c60f73d804cdb05ac527
|
[
"MIT"
] | 1
|
2022-01-27T05:03:01.000Z
|
2022-01-27T05:03:01.000Z
|
tests/test_multithreading.py
|
VlachosGroup/molSim
|
bf1a9f45c9138185756445bff308d4f7b8c47f7b
|
[
"MIT"
] | 16
|
2022-01-22T15:52:18.000Z
|
2022-01-30T03:10:41.000Z
|
tests/test_multithreading.py
|
VlachosGroup/AIMSim
|
21cac88f8338f2683737c60f73d804cdb05ac527
|
[
"MIT"
] | null | null | null |
""" Test multithreading to ensure consistent behavior with serial implementation."""
import unittest
import warnings
from os import remove
from os.path import exists, join
import numpy as np
from aimsim.chemical_datastructures import MoleculeSet
from time import time
from tabulate import tabulate
class TestMultithreading(unittest.TestCase):
"""Unit tests to ensure consistency when running AIMSim as a single process
or when using multiprocessing.
"""
@classmethod
def setUpClass(self):
"""Create a SMILES database to use for comparisons and
find the similarity matrices and execution times.
"""
if not exists(".speedup-test"):
print("Speedup and Efficiency tests DISABLED.")
self.NO_SPEEDUP_TEST = True
else:
self.NO_SPEEDUP_TEST = False
self.N_REPLICATES = 2
warnings.warn(
"Speedup and Efficiency tests ENABLED, expect long runtime.",
ResourceWarning,
)
print(" ~ ~ Testing Multithreading ~ ~ ", flush=True)
# basic consistency tests
self.text_fpath = "temp_multithread_smiles_seq.txt"
print(f"Creating text file {self.text_fpath}", flush=True)
with open(self.text_fpath, "w") as file:
for smiles in ["C", "CC", "CCC", "O", "CCCC", "CO", "CCOCC"]:
file.write(smiles + "\n")
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=1,
fingerprint_type="morgan_fingerprint",
)
self.correct_similarity_matrix = test_molecule_set.get_similarity_matrix()
if self.NO_SPEEDUP_TEST:
return
with open(join("tests", "data", "combinatorial_1.txt"), "r") as file:
data = file.readlines()
_100_molecules = data[1:102]
_500_molecules = data[1:502]
_1000_molecules = data[1:1002]
_5000_molecules = data[1:5002]
_10000_molecules = data[1:10002]
_15000_molecules = data[1:15002]
# data used for speedup and efficiency tests
self._100_molecules_fpath = "temp_multithread_speedup_100.txt"
print(f"Creating text file {self._100_molecules_fpath}", flush=True)
with open(self._100_molecules_fpath, "w") as file:
for smiles in _100_molecules:
file.write(smiles)
print("Running 100 molecules with 1 process.", flush=True)
self._100_molecules_serial_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._100_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=1,
fingerprint_type="morgan_fingerprint",
)
# This creates a running average across all of the replicates.
self._100_molecules_serial_time += (time() -
start) / self.N_REPLICATES
self._500_molecules_fpath = "temp_multithread_speedup_500.txt"
print(f"Creating text file {self._500_molecules_fpath}", flush=True)
with open(self._500_molecules_fpath, "w") as file:
for smiles in _500_molecules:
file.write(smiles)
print("Running 500 molecules with 1 process.", flush=True)
self._500_molecules_serial_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._500_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=1,
fingerprint_type="morgan_fingerprint",
)
self._500_molecules_serial_time += (time() -
start) / self.N_REPLICATES
self._1000_molecules_fpath = "temp_multithread_speedup_1000.txt"
print(f"Creating text file {self._1000_molecules_fpath}", flush=True)
with open(self._1000_molecules_fpath, "w") as file:
for smiles in _1000_molecules:
file.write(smiles)
print("Running 1000 molecules with 1 process.", flush=True)
self._1000_molecules_serial_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._1000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=1,
fingerprint_type="morgan_fingerprint",
)
self._1000_molecules_serial_time += (
time() - start) / self.N_REPLICATES
self._5000_molecules_fpath = "temp_multithread_speedup_5000.txt"
print(f"Creating text file {self._5000_molecules_fpath}", flush=True)
with open(self._5000_molecules_fpath, "w") as file:
for smiles in _5000_molecules:
file.write(smiles)
print("Running 5000 molecules with 1 process.", flush=True)
self._5000_molecules_serial_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._5000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=1,
fingerprint_type="morgan_fingerprint",
)
self._5000_molecules_serial_time += (
time() - start) / self.N_REPLICATES
self._10000_molecules_fpath = "temp_multithread_speedup_10000.txt"
print(f"Creating text file {self._10000_molecules_fpath}", flush=True)
with open(self._10000_molecules_fpath, "w") as file:
for smiles in _10000_molecules:
file.write(smiles)
print("Running 10000 molecules with 1 process.", flush=True)
self._10000_molecules_serial_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._10000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=1,
fingerprint_type="morgan_fingerprint",
)
self._10000_molecules_serial_time += (
time() - start) / self.N_REPLICATES
self._15000_molecules_fpath = "temp_multithread_speedup_15000.txt"
print(f"Creating text file {self._15000_molecules_fpath}", flush=True)
with open(self._15000_molecules_fpath, "w") as file:
for smiles in _15000_molecules:
file.write(smiles)
print("Running 15000 molecules with 1 process.", flush=True)
self._15000_molecules_serial_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._15000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=1,
fingerprint_type="morgan_fingerprint",
)
self._15000_molecules_serial_time += (
time() - start) / self.N_REPLICATES
# data used for speedup and efficiency test 2
print("Running 100 molecules with 1 process.", flush=True)
self._100_molecules_serial_time_2 = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._100_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=1,
fingerprint_type="topological_fingerprint",
)
self._100_molecules_serial_time_2 += (
time() - start) / self.N_REPLICATES
print("Running 500 molecules with 1 process.", flush=True)
self._500_molecules_serial_time_2 = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._500_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=1,
fingerprint_type="topological_fingerprint",
)
self._500_molecules_serial_time_2 += (
time() - start) / self.N_REPLICATES
print("Running 1000 molecules with 1 process.", flush=True)
self._1000_molecules_serial_time_2 = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._1000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=1,
fingerprint_type="topological_fingerprint",
)
self._1000_molecules_serial_time_2 += (
time() - start) / self.N_REPLICATES
print("Running 5000 molecules with 1 process.", flush=True)
self._5000_molecules_serial_time_2 = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._5000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=1,
fingerprint_type="topological_fingerprint",
)
self._5000_molecules_serial_time_2 += (
time() - start) / self.N_REPLICATES
print("Running 10000 molecules with 1 process.", flush=True)
self._10000_molecules_serial_time_2 = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._10000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=1,
fingerprint_type="topological_fingerprint",
)
self._10000_molecules_serial_time_2 += (
time() - start) / self.N_REPLICATES
print("Running 15000 molecules with 1 process.", flush=True)
self._15000_molecules_serial_time_2 = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._15000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="cosine",
n_threads=1,
fingerprint_type="topological_fingerprint",
)
self._15000_molecules_serial_time_2 += (
time() - start) / self.N_REPLICATES
def test_multithreading_autoconfig(self):
"""
Ensure that MoleculeSet can automatically configure multiprocessing..
"""
try:
MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads='auto',
fingerprint_type="morgan_fingerprint",
)
except Exception as e:
self.fail("Multiprocessing automatic configuration failed.")
def test_multithreading_consistency_2_threads(self):
"""
Ensure that the similarity matrix produced with 2 threads is identical to
that produced using a single thread and the serial implementation.
"""
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=2,
fingerprint_type="morgan_fingerprint",
)
self.assertIsNone(
np.testing.assert_array_equal(
test_molecule_set.get_similarity_matrix(),
self.correct_similarity_matrix,
),
"Similarity matrix not equal when using two threads.",
)
def test_multithreading_consistency_3_threads(self):
"""
Ensure that the similarity matrix produced with 3 threads is identical to
that produced using a single thread and the serial implementation.
"""
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=3,
fingerprint_type="morgan_fingerprint",
)
self.assertIsNone(
np.testing.assert_array_equal(
test_molecule_set.get_similarity_matrix(),
self.correct_similarity_matrix,
),
"Similarity matrix not equal when using three threads.",
)
def test_multithreading_consistency_4_threads(self):
"""
Ensure that the similarity matrix produced with 4 threads is identical to
that produced using a single thread and the serial implementation.
"""
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=4,
fingerprint_type="morgan_fingerprint",
)
self.assertIsNone(
np.testing.assert_array_equal(
test_molecule_set.get_similarity_matrix(),
self.correct_similarity_matrix,
),
"Similarity matrix not equal when using four threads.",
)
def test_multithreading_consistency_5_threads(self):
"""
Ensure that the similarity matrix produced with 5 threads is identical to
that produced using a single thread and the serial implementation.
"""
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=5,
fingerprint_type="morgan_fingerprint",
)
self.assertIsNone(
np.testing.assert_array_equal(
test_molecule_set.get_similarity_matrix(),
self.correct_similarity_matrix,
),
"Similarity matrix not equal when using five threads.",
)
def test_multithreading_consistency_6_threads(self):
"""
Ensure that the similarity matrix produced with 6 threads is identical to
that produced using a single thread and the serial implementation.
"""
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=6,
fingerprint_type="morgan_fingerprint",
)
self.assertIsNone(
np.testing.assert_array_equal(
test_molecule_set.get_similarity_matrix(),
self.correct_similarity_matrix,
),
"Similarity matrix not equal when using six threads.",
)
def test_multithreading_consistency_7_threads(self):
"""
Ensure that the similarity matrix produced with 7 threads is identical to
that produced using a single thread and the serial implementation.
"""
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=7,
fingerprint_type="morgan_fingerprint",
)
self.assertIsNone(
np.testing.assert_array_equal(
test_molecule_set.get_similarity_matrix(),
self.correct_similarity_matrix,
),
"Similarity matrix not equal when using seven threads (equal to the number of molecules).",
)
def test_multithreading_consistency_10_threads(self):
"""
Ensure that the similarity matrix produced with 10 threads is identical to
that produced using a single thread and the serial implementation.
"""
test_molecule_set = MoleculeSet(
molecule_database_src=self.text_fpath,
molecule_database_src_type="text",
is_verbose=True,
similarity_measure="tanimoto",
n_threads=10,
fingerprint_type="morgan_fingerprint",
)
self.assertIsNone(
np.testing.assert_array_equal(
test_molecule_set.get_similarity_matrix(),
self.correct_similarity_matrix,
),
"Similarity matrix not equal when using ten threads (more than the number of molecules).",
)
def test_speedup_efficiency_tanimoto(self):
"""
Evaluate the speedup and efficieny of the multiprocessing approach.
"""
if self.NO_SPEEDUP_TEST:
return
print("~" * 10, "\n", "Speedup and Efficiency Test 1\n",
"~" * 10, flush=True)
# 100 molecules
print("Running 100 molecules with 2 processes.", flush=True)
_100_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._100_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=2,
fingerprint_type="morgan_fingerprint",
)
_100_molecules_2_process_time += (time() -
start) / self.N_REPLICATES
_100_molecules_2_process_speedup = (
self._100_molecules_serial_time / _100_molecules_2_process_time
)
_100_molecules_2_process_efficiency = _100_molecules_2_process_speedup / 2
print("Running 100 molecules with 4 processes.", flush=True)
_100_molecules_4_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._100_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=4,
fingerprint_type="morgan_fingerprint",
)
_100_molecules_4_process_time += (time() -
start) / self.N_REPLICATES
_100_molecules_4_process_speedup = (
self._100_molecules_serial_time / _100_molecules_4_process_time
)
_100_molecules_4_process_efficiency = _100_molecules_4_process_speedup / 4
print("Running 100 molecules with 8 processes.", flush=True)
_100_molecules_8_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._100_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=8,
fingerprint_type="morgan_fingerprint",
)
_100_molecules_8_process_time += (time() -
start) / self.N_REPLICATES
_100_molecules_8_process_speedup = (
self._100_molecules_serial_time / _100_molecules_8_process_time
)
_100_molecules_8_process_efficiency = _100_molecules_8_process_speedup / 8
# 500 molecules
print("Running 500 molecules with 2 processes.", flush=True)
_500_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._500_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=2,
fingerprint_type="morgan_fingerprint",
)
_500_molecules_2_process_time += (time() -
start) / self.N_REPLICATES
_500_molecules_2_process_speedup = (
self._500_molecules_serial_time / _500_molecules_2_process_time
)
_500_molecules_2_process_efficiency = _500_molecules_2_process_speedup / 2
print("Running 500 molecules with 4 processes.", flush=True)
_500_molecules_4_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._500_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=4,
fingerprint_type="morgan_fingerprint",
)
_500_molecules_4_process_time += (time() -
start) / self.N_REPLICATES
_500_molecules_4_process_speedup = (
self._500_molecules_serial_time / _500_molecules_4_process_time
)
_500_molecules_4_process_efficiency = _500_molecules_4_process_speedup / 4
print("Running 500 molecules with 8 processes.", flush=True)
_500_molecules_8_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._500_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=8,
fingerprint_type="morgan_fingerprint",
)
_500_molecules_8_process_time += (time() -
start) / self.N_REPLICATES
_500_molecules_8_process_speedup = (
self._500_molecules_serial_time / _500_molecules_8_process_time
)
_500_molecules_8_process_efficiency = _500_molecules_8_process_speedup / 8
# 1000 molecules
print("Running 1000 molecules with 2 processes.", flush=True)
_1000_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._1000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=2,
fingerprint_type="morgan_fingerprint",
)
_1000_molecules_2_process_time += (time() -
start) / self.N_REPLICATES
_1000_molecules_2_process_speedup = (
self._1000_molecules_serial_time / _1000_molecules_2_process_time
)
_1000_molecules_2_process_efficiency = _1000_molecules_2_process_speedup / 2
print("Running 1000 molecules with 4 processes.", flush=True)
_1000_molecules_4_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._1000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=4,
fingerprint_type="morgan_fingerprint",
)
_1000_molecules_4_process_time += (time() -
start) / self.N_REPLICATES
_1000_molecules_4_process_speedup = (
self._1000_molecules_serial_time / _1000_molecules_4_process_time
)
_1000_molecules_4_process_efficiency = _1000_molecules_4_process_speedup / 4
print("Running 1000 molecules with 8 processes.", flush=True)
_1000_molecules_8_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._1000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=8,
fingerprint_type="morgan_fingerprint",
)
_1000_molecules_8_process_time += (time() -
start) / self.N_REPLICATES
_1000_molecules_8_process_speedup = (
self._1000_molecules_serial_time / _1000_molecules_8_process_time
)
_1000_molecules_8_process_efficiency = _1000_molecules_8_process_speedup / 8
print("Running 5000 molecules with 2 processes.", flush=True)
# 5000 molecules
_5000_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._5000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=2,
fingerprint_type="morgan_fingerprint",
)
_5000_molecules_2_process_time += (time() -
start) / self.N_REPLICATES
_5000_molecules_2_process_speedup = (
self._5000_molecules_serial_time / _5000_molecules_2_process_time
)
_5000_molecules_2_process_efficiency = _5000_molecules_2_process_speedup / 2
print("Running 5000 molecules with 4 processes.", flush=True)
_5000_molecules_4_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._5000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=4,
fingerprint_type="morgan_fingerprint",
)
_5000_molecules_4_process_time += (time() -
start) / self.N_REPLICATES
_5000_molecules_4_process_speedup = (
self._5000_molecules_serial_time / _5000_molecules_4_process_time
)
_5000_molecules_4_process_efficiency = _5000_molecules_4_process_speedup / 4
print("Running 5000 molecules with 8 processes.", flush=True)
_5000_molecules_8_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._5000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=8,
fingerprint_type="morgan_fingerprint",
)
_5000_molecules_8_process_time += (time() -
start) / self.N_REPLICATES
_5000_molecules_8_process_speedup = (
self._5000_molecules_serial_time / _5000_molecules_8_process_time
)
_5000_molecules_8_process_efficiency = _5000_molecules_8_process_speedup / 8
# 10000 molecules
print("Running 10000 molecules with 2 processes.", flush=True)
_10000_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._10000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=2,
fingerprint_type="morgan_fingerprint",
)
_10000_molecules_2_process_time += (time() -
start) / self.N_REPLICATES
_10000_molecules_2_process_speedup = (
self._10000_molecules_serial_time / _10000_molecules_2_process_time
)
_10000_molecules_2_process_efficiency = _10000_molecules_2_process_speedup / 2
print("Running 10000 molecules with 4 processes.", flush=True)
_10000_molecules_4_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._10000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=4,
fingerprint_type="morgan_fingerprint",
)
_10000_molecules_4_process_time += (time() -
start) / self.N_REPLICATES
_10000_molecules_4_process_speedup = (
self._10000_molecules_serial_time / _10000_molecules_4_process_time
)
_10000_molecules_4_process_efficiency = _10000_molecules_4_process_speedup / 4
print("Running 10000 molecules with 8 processes.", flush=True)
_10000_molecules_8_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._10000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=8,
fingerprint_type="morgan_fingerprint",
)
_10000_molecules_8_process_time += (
time() - start) / self.N_REPLICATES
_10000_molecules_8_process_speedup = (
self._10000_molecules_serial_time / _10000_molecules_8_process_time
)
_10000_molecules_8_process_efficiency = (
_10000_molecules_8_process_speedup / 8
)
# 15000 molecules
print("Running 15000 molecules with 2 processes.", flush=True)
_15000_molecules_2_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._15000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=2,
fingerprint_type="morgan_fingerprint",
)
_15000_molecules_2_process_time += (time() -
start) / self.N_REPLICATES
_15000_molecules_2_process_speedup = (
self._15000_molecules_serial_time / _15000_molecules_2_process_time
)
_15000_molecules_2_process_efficiency = _15000_molecules_2_process_speedup / 2
print("Running 15000 molecules with 4 processes.", flush=True)
_15000_molecules_4_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._15000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=4,
fingerprint_type="morgan_fingerprint",
)
_15000_molecules_4_process_time += (time() -
start) / self.N_REPLICATES
_15000_molecules_4_process_speedup = (
self._15000_molecules_serial_time / _15000_molecules_4_process_time
)
_15000_molecules_4_process_efficiency = _15000_molecules_4_process_speedup / 4
print("Running 15000 molecules with 8 processes.", flush=True)
_15000_molecules_8_process_time = 0
for i in range(self.N_REPLICATES):
start = time()
test_molecule_set = MoleculeSet(
molecule_database_src=self._15000_molecules_fpath,
molecule_database_src_type="text",
is_verbose=False,
similarity_measure="tanimoto",
n_threads=8,
fingerprint_type="morgan_fingerprint",
)
_15000_molecules_8_process_time += (
time() - start) / self.N_REPLICATES
_15000_molecules_8_process_speedup = (
self._15000_molecules_serial_time / _15000_molecules_8_process_time
)
_15000_molecules_8_process_efficiency = (
_15000_molecules_8_process_speedup / 8
)
print("Speedup:", flush=True)
print(
tabulate(
[
["~", 2, 4, 8],
[
100,
_100_molecules_2_process_speedup,
_100_molecules_4_process_speedup,
_100_molecules_8_process_speedup,
],
[
500,
_500_molecules_2_process_speedup,
_500_molecules_4_process_speedup,
_500_molecules_8_process_speedup,
],
[
1000,
_1000_molecules_2_process_speedup,
_1000_molecules_4_process_speedup,
_1000_molecules_8_process_speedup,
],
[
5000,
_5000_molecules_2_process_speedup,
_5000_molecules_4_process_speedup,
_5000_molecules_8_process_speedup,
],
[
10000,
_10000_molecules_2_process_speedup,
_10000_molecules_4_process_speedup,
_10000_molecules_8_process_speedup,
],
[
15000,
_15000_molecules_2_process_speedup,
_15000_molecules_4_process_speedup,
_15000_molecules_8_process_speedup,
],
],
headers=["# mol", "", "# processes", ""],
)
)
print("Efficiency:", flush=True)
print(
tabulate(
[
["~", 2, 4, 8],
[
100,
_100_molecules_2_process_efficiency,
_100_molecules_4_process_efficiency,
_100_molecules_8_process_efficiency,
],
[
500,
_500_molecules_2_process_efficiency,
_500_molecules_4_process_efficiency,
_500_molecules_8_process_efficiency,
],
[
1000,
_1000_molecules_2_process_efficiency,
_1000_molecules_4_process_efficiency,
_1000_molecules_8_process_efficiency,
],
[
5000,
_5000_molecules_2_process_efficiency,
_5000_molecules_4_process_efficiency,
_5000_molecules_8_process_efficiency,
],
[
10000,
_10000_molecules_2_process_efficiency,
_10000_molecules_4_process_efficiency,
_10000_molecules_8_process_efficiency,
],
[
15000,
_15000_molecules_2_process_efficiency,
_15000_molecules_4_process_efficiency,
_15000_molecules_8_process_efficiency,
],
],
headers=["# mol", "", "# processes", ""],
)
)
print("Execution Time in seconds (serial/parallel):", flush=True)
print(
tabulate(
[
["~", 1, 2, 4, 8],
[
100,
"{:.2f}".format(
float(self._100_molecules_serial_time),
),
"{:.2f}".format(
float(_100_molecules_2_process_time),
),
"{:.2f}".format(
float(_100_molecules_4_process_time),
),
"{:.2f}".format(
float(_100_molecules_8_process_time),
),
],
[
500,
"{:.2f}".format(
float(self._500_molecules_serial_time),
),
"{:.2f}".format(
float(_500_molecules_2_process_time),
),
"{:.2f}".format(
float(_500_molecules_4_process_time),
),
"{:.2f}".format(
float(_500_molecules_8_process_time),
),
],
[
1000,
"{:.2f}".format(
float(self._1000_molecules_serial_time),
),
"{:.2f}".format(
float(_1000_molecules_2_process_time),
),
"{:.2f}".format(
float(_1000_molecules_4_process_time),
),
"{:2f}".format(
float(_1000_molecules_8_process_time),
),
],
[
5000,
"{:.2f}".format(
float(self._5000_molecules_serial_time),
),
"{:.2f}".format(
float(_5000_molecules_2_process_time),
),
"{:.2f}".format(
float(_5000_molecules_4_process_time),
),
"{:.2f}".format(
float(_5000_molecules_8_process_time),
),
],
[
10000,
"{:.2f}".format(
float(self._10000_molecules_serial_time),
),
"{:.2f}".format(
float(_10000_molecules_2_process_time),
),
"{:.2f}".format(
float(_10000_molecules_4_process_time),
),
"{:.2f}".format(
float(_10000_molecules_8_process_time),
),
],
[
15000,
"{:.2f}".format(
float(self._15000_molecules_serial_time),
),
"{:.2f}".format(
float(_15000_molecules_2_process_time),
),
"{:.2f}".format(
float(_15000_molecules_4_process_time),
),
"{:.2f}".format(
float(_15000_molecules_8_process_time),
),
],
],
headers=["# mol", "", "", "# processes", ""],
)
)
def test_speedup_efficiency_cosine(self):
    """
    Evaluate the speedup and efficiency of the multiprocessing approach
    with a more complex metric (cosine similarity over topological
    fingerprints).

    For every database size, builds a MoleculeSet with 2, 4, and 8
    processes, averaging the wall-clock time over N_REPLICATES runs,
    then prints three tables: speedup (serial_time / parallel_time),
    efficiency (speedup / n_processes), and raw execution times.

    Bug fixed vs. the previous revision: the 1000-molecule / 8-process
    execution time used the format spec "{:2f}" (missing the dot),
    which printed ~6 decimal places instead of 2.
    """
    if self.NO_SPEEDUP_TEST:
        return
    print("~" * 10, "\n", "Speedup and Efficiency Test 2\n",
          "~" * 10, flush=True)
    molecule_counts = (100, 500, 1000, 5000, 10000, 15000)
    process_counts = (2, 4, 8)
    # Serial baselines (suffix "_2" = second metric) and database file
    # paths were prepared earlier on the test class, one per size.
    serial_times = {
        n_mol: getattr(self, "_{}_molecules_serial_time_2".format(n_mol))
        for n_mol in molecule_counts
    }
    fpaths = {
        n_mol: getattr(self, "_{}_molecules_fpath".format(n_mol))
        for n_mol in molecule_counts
    }
    run_times = {}      # (n_mol, n_proc) -> mean parallel wall time
    speedups = {}       # (n_mol, n_proc) -> serial / parallel
    efficiencies = {}   # (n_mol, n_proc) -> speedup / n_proc
    for n_mol in molecule_counts:
        for n_proc in process_counts:
            print("Running {} molecules with {} processes.".format(
                n_mol, n_proc), flush=True)
            elapsed = 0
            for _ in range(self.N_REPLICATES):
                start = time()
                MoleculeSet(
                    molecule_database_src=fpaths[n_mol],
                    molecule_database_src_type="text",
                    is_verbose=False,
                    similarity_measure="cosine",
                    n_threads=n_proc,
                    fingerprint_type="topological_fingerprint",
                )
                # Accumulate the running mean directly, as before.
                elapsed += (time() - start) / self.N_REPLICATES
            run_times[(n_mol, n_proc)] = elapsed
            speedups[(n_mol, n_proc)] = serial_times[n_mol] / elapsed
            efficiencies[(n_mol, n_proc)] = (
                speedups[(n_mol, n_proc)] / n_proc
            )
    print("Speedup:", flush=True)
    print(
        tabulate(
            [["~"] + list(process_counts)]
            + [
                [n_mol] + [speedups[(n_mol, p)] for p in process_counts]
                for n_mol in molecule_counts
            ],
            headers=["# mol", "", "# processes", ""],
        )
    )
    print("Efficiency:", flush=True)
    print(
        tabulate(
            [["~"] + list(process_counts)]
            + [
                [n_mol]
                + [efficiencies[(n_mol, p)] for p in process_counts]
                for n_mol in molecule_counts
            ],
            headers=["# mol", "", "# processes", ""],
        )
    )
    print("Execution Time in seconds (serial/parallel):", flush=True)
    print(
        tabulate(
            [["~", 1] + list(process_counts)]
            + [
                [n_mol, "{:.2f}".format(float(serial_times[n_mol]))]
                + [
                    "{:.2f}".format(float(run_times[(n_mol, p)]))
                    for p in process_counts
                ]
                for n_mol in molecule_counts
            ],
            headers=["# mol", "", "", "# processes", ""],
        )
    )
@classmethod
def tearDownClass(cls):
    """Delete temporary SMILES database files used in testing.

    Idiom fix: the classmethod's first parameter is named ``cls`` per
    PEP 8 (it receives the class, not an instance).
    """
    print("Deleting smiles database files.", flush=True)
    remove(cls.text_fpath)
    if not cls.NO_SPEEDUP_TEST:
        # The sized databases are only created when speedup tests run.
        remove(cls._100_molecules_fpath)
        remove(cls._500_molecules_fpath)
        remove(cls._1000_molecules_fpath)
        remove(cls._5000_molecules_fpath)
        remove(cls._10000_molecules_fpath)
        remove(cls._15000_molecules_fpath)
    print(" ~ ~ Multithreading Test Complete ~ ~ ", flush=True)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| 42.622022
| 103
| 0.541591
| 6,158
| 66,192
| 5.315362
| 0.03329
| 0.048393
| 0.066174
| 0.052242
| 0.938195
| 0.925272
| 0.913326
| 0.899823
| 0.892643
| 0.872999
| 0
| 0.074489
| 0.390742
| 66,192
| 1,552
| 104
| 42.649485
| 0.737155
| 0.028659
| 0
| 0.767882
| 0
| 0
| 0.087084
| 0.014843
| 0
| 0
| 0
| 0
| 0.009818
| 1
| 0.008415
| false
| 0
| 0.00561
| 0
| 0.01683
| 0.091164
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
234546bdea84a9cb327c1c565ecc14262aa650fe
| 8,001
|
py
|
Python
|
usaspending_api/awards/migrations/0002_auto_20160912_1756.py
|
toolness/usaspending-api
|
ed9a396e20a52749f01f43494763903cc371f9c2
|
[
"CC0-1.0"
] | 1
|
2021-06-17T05:09:00.000Z
|
2021-06-17T05:09:00.000Z
|
usaspending_api/awards/migrations/0002_auto_20160912_1756.py
|
toolness/usaspending-api
|
ed9a396e20a52749f01f43494763903cc371f9c2
|
[
"CC0-1.0"
] | null | null | null |
usaspending_api/awards/migrations/0002_auto_20160912_1756.py
|
toolness/usaspending-api
|
ed9a396e20a52749f01f43494763903cc371f9c2
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-12 17:56
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django migration (Django 1.10.1, 2016-09-12).

    Creates the ``financial_accounts_by_awards`` and
    ``financial_accounts_by_awards_transaction_obligations`` tables.
    NOTE(review): generated migrations record applied database state and
    should not be edited by hand; only documentation is added here.
    """

    # Must run after the initial migrations of the apps whose models
    # are referenced by foreign keys below.
    dependencies = [
        ('references', '0001_initial'),
        ('accounts', '0001_initial'),
        ('awards', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='FinancialAccountsByAwards',
            fields=[
                ('financial_accounts_by_awards_id', models.AutoField(primary_key=True, serialize=False)),
                ('program_activity_name', models.CharField(blank=True, max_length=164, null=True)),
                ('by_direct_reimbursable_fun', models.CharField(blank=True, max_length=1, null=True)),
                ('piid', models.CharField(blank=True, max_length=50, null=True)),
                ('parent_award_id', models.CharField(blank=True, max_length=50, null=True)),
                ('fain', models.CharField(blank=True, max_length=30, null=True)),
                ('uri', models.CharField(blank=True, max_length=70, null=True)),
                ('award_type', models.CharField(blank=True, max_length=30, null=True)),
                # USSGL account balance columns: whole-dollar amounts
                # (decimal_places=0, max_digits=21).
                ('ussgl480100_undelivered_or_fyb', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl480100_undelivered_or_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl483100_undelivered_or_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl488100_upward_adjustm_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl490100_delivered_orde_fyb', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl490100_delivered_orde_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl493100_delivered_orde_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl498100_upward_adjustm_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl480200_undelivered_or_fyb', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl480200_undelivered_or_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl483200_undelivered_or_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl488200_upward_adjustm_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl490200_delivered_orde_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl490800_authority_outl_fyb', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl490800_authority_outl_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl498200_upward_adjustm_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('obligations_undelivered_or_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('obligations_delivered_orde_fyb', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('obligations_delivered_orde_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('gross_outlays_undelivered_fyb', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('gross_outlays_undelivered_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('gross_outlays_delivered_or_fyb', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('gross_outlay_amount_by_awa_fyb', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('gross_outlay_amount_by_awa_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('obligations_incurred_byawa_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl487100_downward_adjus_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl497100_downward_adjus_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl487200_downward_adjus_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('ussgl497200_downward_adjus_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('deobligations_recov_by_awa_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('obligations_undelivered_or_fyb', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('gross_outlays_delivered_or_cpe', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('drv_award_id_field_type', models.CharField(blank=True, max_length=10, null=True)),
                ('drv_oblig_incur_total_by_award', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('reporting_period_start', models.DateField(blank=True, null=True)),
                ('reporting_period_end', models.DateField(blank=True, null=True)),
                ('create_date', models.DateTimeField(blank=True, null=True)),
                ('update_date', models.DateTimeField(blank=True, null=True)),
                ('create_user_id', models.CharField(blank=True, max_length=50, null=True)),
                ('update_user_id', models.CharField(blank=True, max_length=50, null=True)),
                # Foreign keys use DO_NOTHING: referential integrity is
                # left to the database rather than Django cascades.
                ('appropriation_account_balances', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='accounts.AppropriationAccountBalances')),
                ('object_class', models.ForeignKey(db_column='object_class', on_delete=django.db.models.deletion.DO_NOTHING, to='references.RefObjectClassCode')),
                ('program_activity_code', models.ForeignKey(blank=True, db_column='program_activity_code', null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='references.RefProgramActivity')),
            ],
            options={
                'db_table': 'financial_accounts_by_awards',
                'managed': True,
            },
        ),
        migrations.CreateModel(
            name='FinancialAccountsByAwardsTransactionObligations',
            fields=[
                ('financial_accounts_by_awards_transaction_obligations_id', models.AutoField(primary_key=True, serialize=False)),
                ('transaction_obligated_amou', models.DecimalField(blank=True, decimal_places=0, max_digits=21, null=True)),
                ('reporting_period_start', models.DateField(blank=True, null=True)),
                ('reporting_period_end', models.DateField(blank=True, null=True)),
                ('create_date', models.DateTimeField(blank=True, null=True)),
                ('update_date', models.DateTimeField(blank=True, null=True)),
                ('create_user_id', models.CharField(blank=True, max_length=50, null=True)),
                ('update_user_id', models.CharField(blank=True, max_length=50, null=True)),
                ('financial_accounts_by_awards', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='awards.FinancialAccountsByAwards')),
            ],
            options={
                'db_table': 'financial_accounts_by_awards_transaction_obligations',
                'managed': True,
            },
        ),
    ]
| 82.484536
| 203
| 0.678415
| 939
| 8,001
| 5.489883
| 0.153355
| 0.096023
| 0.151697
| 0.17808
| 0.80388
| 0.792435
| 0.759845
| 0.73676
| 0.719302
| 0.694277
| 0
| 0.042609
| 0.193351
| 8,001
| 96
| 204
| 83.34375
| 0.75612
| 0.008499
| 0
| 0.269663
| 1
| 0
| 0.237453
| 0.200883
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.033708
| 0
| 0.067416
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
aee59bf8baeb9744303188c07dc017ca22b92a5c
| 2,738
|
py
|
Python
|
hwmapping/notebook/schedule.py
|
phip123/workload-aware-k8s
|
79e047916b7239467f299bd2ad605c6ac375cbca
|
[
"MIT"
] | 5
|
2021-03-08T10:27:27.000Z
|
2022-03-24T14:37:17.000Z
|
hwmapping/notebook/schedule.py
|
phip123/workload-aware-k8s
|
79e047916b7239467f299bd2ad605c6ac375cbca
|
[
"MIT"
] | null | null | null |
hwmapping/notebook/schedule.py
|
phip123/workload-aware-k8s
|
79e047916b7239467f299bd2ad605c6ac375cbca
|
[
"MIT"
] | null | null | null |
import pandas as pd
def get_total_and_failed_schedules(df: pd.DataFrame) -> pd.DataFrame:
    """Aggregate per-function schedule counts into totals per
    (workload, type_run, devices) group, with a failed fraction column."""
    per_fn = get_total_failed_schedules_per_fn(df)
    group_keys = ['workload', 'type_run', 'devices']
    totals = per_fn.groupby(group_keys).sum()
    totals['failed_perc'] = totals[['value', 'failed']].apply(
        lambda row: row['failed'] / row['value'], axis=1)
    out_cols = ['workload', 'type_run', 'value', 'failed', 'devices', 'failed_perc']
    return totals.reset_index()[out_cols].sort_values(by=group_keys)
def get_total_failed_schedules_per_fn(df: pd.DataFrame) -> pd.DataFrame:
    """Count queued vs. successfully finished schedules per
    (workload, type_run, function_name, devices) group, deriving a
    ``failed`` count and ``failed_perc`` fraction per group."""
    group_cols = ['workload', 'type_run', 'function_name', 'devices']
    data = df.copy()
    finished_ok = data[data['successful'] == True]
    finished_ok = finished_ok[finished_ok['value'] == 'finish'].groupby(
        group_cols).count()
    queued = data[data['value'] == 'queue'].groupby(group_cols).count()
    merged = finished_ok.join(queued, lsuffix='_s')
    # value_s == successful, value == all => value_s <= value
    failed_count = merged[['value_s', 'value']].apply(
        lambda row: row['value'] - row['value_s'], axis=1)
    failed_frac = merged[['value_s', 'value']].apply(
        lambda row: 1 - (row['value_s']) / row['value'], axis=1)
    queued['failed'] = failed_count
    queued['failed_perc'] = failed_frac
    return queued
def get_total_and_failed_schedules_per_node_type(df: pd.DataFrame) -> pd.DataFrame:
    """Sum per-function schedule counts up to (workload, type_run, devices)
    granularity; ``failed_perc`` is summed as-is from the per-function table."""
    per_fn = get_total_failed_schedules_per_fn(df)
    group_keys = ['workload', 'type_run', 'devices']
    totals = per_fn.groupby(group_keys).sum()
    out_cols = ['workload', 'type_run', 'value', 'failed', 'devices', 'failed_perc']
    return totals.reset_index()[out_cols].sort_values(by=group_keys)
def get_total_failed_schedules_per_node(df: pd.DataFrame) -> pd.DataFrame:
    """Count queued vs. successfully finished schedules, normalized per function.

    Same computation as get_total_failed_schedules_per_fn, except that
    'failed_perc' is additionally divided by the number of distinct function
    names so the per-group percentages can be summed across functions.

    :param df: raw schedule-event frame with columns ['workload', 'type_run',
        'function_name', 'devices', 'successful', 'value']; not mutated.
    :return: per-group 'queue' counts with 'failed' and normalized
        'failed_perc' columns added.
    """
    copy = df.copy()
    keys = ['workload', 'type_run', 'function_name', 'devices']
    # Successfully finished schedule events, counted per group.
    successful = copy[(copy['successful'] == True) & (copy['value'] == 'finish')].groupby(keys).count()
    # Everything that entered the queue, counted per group.
    queued = copy[copy['value'] == 'queue'].groupby(keys).count()
    joined_s_q = successful.join(queued, lsuffix='_s')
    # value_s == successful, value == all => value_s <= value.
    # Vectorized arithmetic replaces the former row-wise apply(axis=1) calls.
    queued['failed'] = joined_s_q['value'] - joined_s_q['value_s']
    diff_perc = 1 - joined_s_q['value_s'] / joined_s_q['value']
    queued['failed_perc'] = diff_perc / len(df['function_name'].unique())
    return queued
| 53.686275
| 113
| 0.622352
| 343
| 2,738
| 4.714286
| 0.157434
| 0.044527
| 0.092764
| 0.081633
| 0.899196
| 0.899196
| 0.863327
| 0.86209
| 0.86209
| 0.86209
| 0
| 0.003165
| 0.192111
| 2,738
| 50
| 114
| 54.76
| 0.727848
| 0.040541
| 0
| 0.725
| 0
| 0
| 0.216463
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.025
| 0
| 0.225
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aef2ad95f296e439eed87d995cbe5d9b638c4d52
| 4,487
|
py
|
Python
|
Gress.py
|
RengeRenge/BasCreater
|
34b6f1f9f482bcbd4debfe85c6513cd653380fba
|
[
"MIT"
] | null | null | null |
Gress.py
|
RengeRenge/BasCreater
|
34b6f1f9f482bcbd4debfe85c6513cd653380fba
|
[
"MIT"
] | null | null | null |
Gress.py
|
RengeRenge/BasCreater
|
34b6f1f9f482bcbd4debfe85c6513cd653380fba
|
[
"MIT"
] | null | null | null |
from array import array
from lib import Bas
import random
# Template styles for the three grass-blade glyphs (presumably rendered as
# bilibili BAS danmaku — TODO confirm against lib.Bas).  All three share the
# same size, colour, and baseline; they differ only in glyph, anchor side,
# and initial tilt (rotateZ, degrees).
gressl = Bas.BasType({
    "content": "⎝",
    "bold": 0,
    "textShadow": 0,
    "anchorY": 1,
    "anchorX": 1,
    "fontSize": "30%",
    "color": "0x8FBC8F",  # dark sea green
    "y": "125%",
    "x": "30%",
    "rotateZ": 0,
})
gressm = Bas.BasType({
    "content": "⎝",
    "bold": 0,
    "textShadow": 0,
    "anchorY": 1,
    "anchorX": 1,
    "fontSize": "30%",
    "color": "0x8FBC8F",
    "y": "125%",
    "x": "30%",
    "rotateZ": 10,
})
gressr = Bas.BasType({
    "content": "⎠",  # mirrored glyph, anchored on the opposite side
    "bold": 0,
    "textShadow": 0,
    "anchorY": 1,
    "anchorX": 0,
    "fontSize": "30%",
    "color": "0x8FBC8F",
    "y": "125%",
    "x": "30%",
    "rotateZ": -15,
})
# Index order matters: create_one picks gress_types[i] for the i-th colour.
gress_types = [gressl, gressm, gressr]
def create_one(start_time, start_x, end_x, colors: list, included_angle: list):
    """Spawn one group of grass glyphs and drift it left from start_x to end_x.

    One Bas object is created per colour (using the matching template from
    gress_types), faded in after `start_time`, then nudged repeatedly to the
    left with a small random wobble in rotateZ until the last object's x drops
    below end_x.

    :param start_time: delay (seconds) before the group fades in.
    :param start_x: starting x position in percent.
    :param end_x: x position (percent) at which the drift loop stops.
    :param colors: one colour string per glyph; at most len(gress_types) entries.
    :param included_angle: per-glyph extra tilt added to the template rotateZ.
    """
    animate_space = 0.2           # duration of the initial stagger nudge
    duration_of_offset = 0.7      # duration of each drift step
    offset_x_of_angle_animate = 6  # x distance (percent) moved per drift step
    obj_group: list[Bas.BasObject] = []
    for i, color in enumerate(colors):
        g = gress_types[i]
        # Total tilt = caller-supplied angle + the template's own tilt.
        angle = included_angle[i] + g.get_float('rotateZ')
        obj_group.append(
            Bas.BasObject(
                g, {"color": color, "rotateZ": f"{angle}", "x": f"{start_x - i * 2}%"})
        )
    animate_group: list[Bas.BasAnimate] = []
    # Display: wait `start_time`, then fade each glyph in over 0.1s.
    for g in obj_group:
        animate = Bas.BasAnimate().animate(
            g,
            duration=start_time
        ).animate(
            g,
            duration=0.1,
            attribute={"alpha": 1}
        )
        animate_group.append(animate)
    # Stagger: shift every glyph except the first slightly left so the later
    # drift steps keep the blades visually grouped.
    offsetX = offset_x_of_angle_animate * animate_space
    for i, obj in enumerate(obj_group):
        if i > 0:
            animate = animate_group[i]
            x1 = obj.get_float('x') - offsetX
            animate.animate(obj, duration=animate_space,
                            attribute={"x": f"{x1}%"})
    # Drift loop: alternate tilt direction each step (based on the animate
    # step count parity) while moving left, until the rightmost glyph passes
    # end_x.  Assumes animate() updates the object's stored 'x' — TODO confirm
    # in lib.Bas, otherwise this loop would not terminate.
    while obj_group[-1].get_float('x') > end_x:
        left = animate_group[0].count % 2
        for i, obj in enumerate(obj_group):
            animate = animate_group[i]
            rorate = (random.uniform(-1, -0.5)
                      if left else random.uniform(5, 6))
            x = obj.get_float('x')
            rorate_z = included_angle[i] + gress_types[i].get_float('rotateZ')
            animate.animate(obj, duration=duration_of_offset, attribute={
                "rotateZ": rorate_z + rorate, "x": f"{x - offset_x_of_angle_animate}%"})
    # Flush/close every animation sequence.
    for ani in animate_group:
        ani.finish()
def create_one_to_end(start_time, start_x, colors: list, included_angle: list):
    """Spawn one group of grass glyphs and drift it left until off-screen (x <= -20).

    Variant of create_one: all glyphs use the gressl template, the caller's
    included_angle is used as the absolute tilt (no template tilt added), and
    the drift always runs to the fixed end position -20%.

    :param start_time: delay (seconds) before the group fades in.
    :param start_x: starting x position in percent.
    :param colors: one colour string per glyph.
    :param included_angle: per-glyph tilt in degrees.
    """
    animate_space = 0.2        # duration of the initial stagger nudge
    duration_of_offset = 0.7   # duration of each drift step
    offset_of_angle_animate = 5  # x distance (percent) moved per drift step
    obj_group: list[Bas.BasObject] = []
    for i, color in enumerate(colors):
        angle = included_angle[i]
        obj_group.append(
            Bas.BasObject(
                gressl, {"color": color, "rotateZ": f"{angle}", "x": f"{start_x + i * 2}%"})
        )
    # g1 = Bas.BasObject(gress, {"rotateZ": -5, "x": f"{start_x}%"})
    # g2 = Bas.BasObject(gress, {"rotateZ": 2, "x": f"{start_x + 2}%"})
    # g3 = Bas.BasObject(gress, {"rotateZ": 6, "x": f"{start_x + 4}%"})
    animate_group: list[Bas.BasAnimate] = []
    # Display: wait `start_time`, then fade each glyph in over 0.1s.
    for g in obj_group:
        animate = Bas.BasAnimate().animate(
            g,
            duration=start_time
        ).animate(
            g,
            duration=0.1,
            attribute={"alpha": 1}
        )
        animate_group.append(animate)
    # Stagger: shift every glyph except the first slightly left.
    offsetX = offset_of_angle_animate * animate_space
    for i, obj in enumerate(obj_group):
        if i > 0:
            animate = animate_group[i]
            x1 = obj.get_float('x') - offsetX
            animate.animate(obj, duration=animate_space,
                            attribute={"x": f"{x1}%"})
    # Drift loop: wobble tilt (direction alternates with step-count parity)
    # while moving left, until the rightmost glyph is off-screen at -20%.
    while obj_group[-1].get_float('x') > -20:
        left = animate_group[0].count % 2
        for i, obj in enumerate(obj_group):
            animate = animate_group[i]
            rorate = (random.uniform(-1, -0.5)
                      if left else random.uniform(5, 6))
            x = obj.get_float('x')
            rorate_z = included_angle[i]
            animate.animate(obj, duration=duration_of_offset, attribute={
                "rotateZ": rorate_z + rorate, "x": f"{x - offset_of_angle_animate}%"})
    # Flush/close every animation sequence.
    for ani in animate_group:
        ani.finish()
| 29.326797
| 93
| 0.520169
| 538
| 4,487
| 4.163569
| 0.154275
| 0.042857
| 0.0375
| 0.017857
| 0.818304
| 0.7875
| 0.7875
| 0.773661
| 0.773661
| 0.757143
| 0
| 0.032333
| 0.331402
| 4,487
| 152
| 94
| 29.519737
| 0.713333
| 0.046802
| 0
| 0.731707
| 0
| 0
| 0.102938
| 0.012624
| 0
| 0
| 0.005827
| 0
| 0
| 1
| 0.01626
| false
| 0
| 0.02439
| 0
| 0.04065
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9de0a0498534dbc4e4a0fac1b2c3ef7245ecdd0a
| 47
|
py
|
Python
|
gym_wrapper/__init__.py
|
ebola777/gym-demo
|
6e075870020303fc073515df79f9a8da655d5dc3
|
[
"MIT"
] | null | null | null |
gym_wrapper/__init__.py
|
ebola777/gym-demo
|
6e075870020303fc073515df79f9a8da655d5dc3
|
[
"MIT"
] | null | null | null |
gym_wrapper/__init__.py
|
ebola777/gym-demo
|
6e075870020303fc073515df79f9a8da655d5dc3
|
[
"MIT"
] | null | null | null |
from gym_wrapper.gym_wrapper import GymWrapper
| 23.5
| 46
| 0.893617
| 7
| 47
| 5.714286
| 0.714286
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.930233
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9def728cfe9a0dbe1a1393007ec292ad1e356086
| 102,176
|
py
|
Python
|
turdshovel/_stubs/System/Numerics.py
|
daddycocoaman/turdshovel
|
6f9d9b08734028fa819c590e8573ae49481dc769
|
[
"MIT"
] | 39
|
2021-10-30T06:34:21.000Z
|
2022-03-22T09:04:40.000Z
|
turdshovel/_stubs/System/Numerics.py
|
daddycocoaman/turdshovel
|
6f9d9b08734028fa819c590e8573ae49481dc769
|
[
"MIT"
] | null | null | null |
turdshovel/_stubs/System/Numerics.py
|
daddycocoaman/turdshovel
|
6f9d9b08734028fa819c590e8573ae49481dc769
|
[
"MIT"
] | 3
|
2021-10-30T03:56:16.000Z
|
2021-11-08T01:59:32.000Z
|
# encoding: utf-8
# module System.Numerics calls itself Numerics
# from System.Numerics.Vectors, Version=4.1.4.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a
# by generator 1.145
# no doc
# no imports
# functions
def Vector(*args, **kwargs): # real signature unknown
    """ Provides a collection of static convenience methods for creating, manipulating, combining, and converting generic vectors. """
    # Auto-generated placeholder stub; the real implementation lives in the
    # System.Numerics.Vectors CLR assembly.
    pass
# classes
class Matrix3x2(object, IEquatable[Matrix3x2]):
    """
    Represents a 3x2 matrix.
    Matrix3x2(m11: Single, m12: Single, m21: Single, m22: Single, m31: Single, m32: Single)
    """
    # NOTE: auto-generated IronPython stub for the CLR type
    # System.Numerics.Matrix3x2.  Every body is a placeholder ('pass');
    # the docstrings carry the CLR signatures and overloads.
    @staticmethod
    def Add(value1, value2):
        """
        Add(value1: Matrix3x2, value2: Matrix3x2) -> Matrix3x2
        Adds each element in one matrix with its corresponding element in a second matrix.
        value1: The first matrix.
        value2: The second matrix.
        Returns: The matrix that contains the summed values of value1value1 and value2value2.
        """
        pass
    @staticmethod
    def CreateRotation(radians, centerPoint=None):
        """
        CreateRotation(radians: Single) -> Matrix3x2
        Creates a rotation matrix using the given rotation in radians.
        radians: The amount of rotation, in radians.
        Returns: The rotation matrix.
        CreateRotation(radians: Single, centerPoint: Vector2) -> Matrix3x2
        Creates a rotation matrix using the specified rotation in radians and a center point.
        radians: The amount of rotation, in radians.
        centerPoint: The center point.
        Returns: The rotation matrix.
        """
        pass
    @staticmethod
    def CreateScale(*__args):
        """
        CreateScale(xScale: Single, yScale: Single) -> Matrix3x2
        Creates a scaling matrix from the specified X and Y components.
        xScale: The value to scale by on the X axis.
        yScale: The value to scale by on the Y axis.
        Returns: The scaling matrix.
        CreateScale(xScale: Single, yScale: Single, centerPoint: Vector2) -> Matrix3x2
        Creates a scaling matrix that is offset by a given center point.
        xScale: The value to scale by on the X axis.
        yScale: The value to scale by on the Y axis.
        centerPoint: The center point.
        Returns: The scaling matrix.
        CreateScale(scales: Vector2) -> Matrix3x2
        Creates a scaling matrix from the specified vector scale.
        scales: The scale to use.
        Returns: The scaling matrix.
        CreateScale(scales: Vector2, centerPoint: Vector2) -> Matrix3x2
        Creates a scaling matrix from the specified vector scale with an offset from the specified center point.
        scales: The scale to use.
        centerPoint: The center offset.
        Returns: The scaling matrix.
        CreateScale(scale: Single) -> Matrix3x2
        Creates a scaling matrix that scales uniformly with the given scale.
        scale: The uniform scale to use.
        Returns: The scaling matrix.
        CreateScale(scale: Single, centerPoint: Vector2) -> Matrix3x2
        Creates a scaling matrix that scales uniformly with the specified scale with an offset from the specified center.
        scale: The uniform scale to use.
        centerPoint: The center offset.
        Returns: The scaling matrix.
        """
        pass
    @staticmethod
    def CreateSkew(radiansX, radiansY, centerPoint=None):
        """
        CreateSkew(radiansX: Single, radiansY: Single) -> Matrix3x2
        Creates a skew matrix from the specified angles in radians.
        radiansX: The X angle, in radians.
        radiansY: The Y angle, in radians.
        Returns: The skew matrix.
        CreateSkew(radiansX: Single, radiansY: Single, centerPoint: Vector2) -> Matrix3x2
        Creates a skew matrix from the specified angles in radians and a center point.
        radiansX: The X angle, in radians.
        radiansY: The Y angle, in radians.
        centerPoint: The center point.
        Returns: The skew matrix.
        """
        pass
    @staticmethod
    def CreateTranslation(*__args):
        """
        CreateTranslation(position: Vector2) -> Matrix3x2
        Creates a translation matrix from the specified 2-dimensional vector.
        position: The translation position.
        Returns: The translation matrix.
        CreateTranslation(xPosition: Single, yPosition: Single) -> Matrix3x2
        Creates a translation matrix from the specified X and Y components.
        xPosition: The X position.
        yPosition: The Y position.
        Returns: The translation matrix.
        """
        pass
    def Equals(self, *__args):
        """
        Equals(self: Matrix3x2, other: Matrix3x2) -> bool
        Returns a value that indicates whether this instance and another 3x2 matrix are equal.
        other: The other matrix.
        Returns: true if the two matrices are equal; otherwise, false.
        Equals(self: Matrix3x2, obj: object) -> bool
        Returns a value that indicates whether this instance and a specified object are equal.
        obj: The object to compare with the current instance.
        Returns: true if the current instance and objobj are equal; otherwise, false. If objobj is null, the method returns false.
        """
        pass
    def GetDeterminant(self):
        """
        GetDeterminant(self: Matrix3x2) -> Single
        Calculates the determinant for this matrix.
        Returns: The determinant.
        """
        pass
    def GetHashCode(self):
        """
        GetHashCode(self: Matrix3x2) -> int
        Returns the hash code for this instance.
        Returns: The hash code.
        """
        pass
    @staticmethod
    def Invert(matrix, result):
        """
        Invert(matrix: Matrix3x2) -> (bool, Matrix3x2)
        Inverts the specified matrix. The return value indicates whether the operation succeeded.
        matrix: The matrix to invert.
        Returns: true if matrixmatrix was converted successfully; otherwise, false.
        """
        pass
    @staticmethod
    def Lerp(matrix1, matrix2, amount):
        """
        Lerp(matrix1: Matrix3x2, matrix2: Matrix3x2, amount: Single) -> Matrix3x2
        Performs a linear interpolation from one matrix to a second matrix based on a value that specifies the weighting of the second matrix.
        matrix1: The first matrix.
        matrix2: The second matrix.
        amount: The relative weighting of matrix2.
        Returns: The interpolated matrix.
        """
        pass
    @staticmethod
    def Multiply(value1, value2):
        """
        Multiply(value1: Matrix3x2, value2: Matrix3x2) -> Matrix3x2
        Returns the matrix that results from multiplying two matrices together.
        value1: The first matrix.
        value2: The second matrix.
        Returns: The product matrix.
        Multiply(value1: Matrix3x2, value2: Single) -> Matrix3x2
        Returns the matrix that results from scaling all the elements of a specified matrix by a scalar factor.
        value1: The matrix to scale.
        value2: The scaling value to use.
        Returns: The scaled matrix.
        """
        pass
    @staticmethod
    def Negate(value):
        """
        Negate(value: Matrix3x2) -> Matrix3x2
        Negates the specified matrix by multiplying all its values by -1.
        value: The matrix to negate.
        Returns: The negated matrix.
        """
        pass
    @staticmethod
    def Subtract(value1, value2):
        """
        Subtract(value1: Matrix3x2, value2: Matrix3x2) -> Matrix3x2
        Subtracts each element in a second matrix from its corresponding element in a first matrix.
        value1: The first matrix.
        value2: The second matrix.
        Returns: The matrix containing the values that result from subtracting each element in value2value2 from its corresponding element in value1value1.
        """
        pass
    def ToString(self):
        """
        ToString(self: Matrix3x2) -> str
        Returns a string that represents this matrix.
        Returns: The string representation of this matrix.
        """
        pass
    def __add__(self, *args): #cannot find CLR method
        """ x.__add__(y) <==> x+y """
        pass
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==y """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __mul__(self, *args): #cannot find CLR method
        """ x.__mul__(y) <==> x*yx.__mul__(y) <==> x*y """
        pass
    def __neg__(self, *args): #cannot find CLR method
        """ x.__neg__() <==> -x """
        pass
    @staticmethod # known case of __new__
    def __new__(self, m11, m12, m21, m22, m31, m32):
        """
        __new__(cls: type, m11: Single, m12: Single, m21: Single, m22: Single, m31: Single, m32: Single)
        __new__[Matrix3x2]() -> Matrix3x2
        """
        pass
    def __ne__(self, *args): #cannot find CLR method
        pass
    def __radd__(self, *args): #cannot find CLR method
        """
        __radd__(value1: Matrix3x2, value2: Matrix3x2) -> Matrix3x2
        Adds each element in one matrix with its corresponding element in a second matrix.
        value1: The first matrix.
        value2: The second matrix.
        Returns: The matrix that contains the summed values.
        """
        pass
    def __repr__(self, *args): #cannot find CLR method
        """ __repr__(self: object) -> str """
        pass
    def __rmul__(self, *args): #cannot find CLR method
        """
        __rmul__(value1: Matrix3x2, value2: Matrix3x2) -> Matrix3x2
        Returns the matrix that results from multiplying two matrices together.
        value1: The first matrix.
        value2: The second matrix.
        Returns: The product matrix.
        """
        pass
    def __rsub__(self, *args): #cannot find CLR method
        """
        __rsub__(value1: Matrix3x2, value2: Matrix3x2) -> Matrix3x2
        Subtracts each element in a second matrix from its corresponding element in a first matrix.
        value1: The first matrix.
        value2: The second matrix.
        Returns: The matrix containing the values that result from subtracting each element in value2value2 from its corresponding element in value1value1.
        """
        pass
    def __str__(self, *args): #cannot find CLR method
        pass
    def __sub__(self, *args): #cannot find CLR method
        """ x.__sub__(y) <==> x-y """
        pass
    # CLR properties exposed as Python property objects; the lambda bodies are
    # stub placeholders, not the real accessors.
    IsIdentity = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Indicates whether the current matrix is the identity matrix.
    Get: IsIdentity(self: Matrix3x2) -> bool
    """
    Translation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Gets or sets the translation component of this matrix.
    Get: Translation(self: Matrix3x2) -> Vector2
    Set: Translation(self: Matrix3x2) = value
    """
    # Static field / element placeholders; real values are provided by the CLR
    # at runtime (Identity is the identity matrix, M11..M32 the elements).
    Identity = None
    M11 = None
    M12 = None
    M21 = None
    M22 = None
    M31 = None
    M32 = None
class Matrix4x4(object, IEquatable[Matrix4x4]):
"""
Represents a 4x4 matrix.
Matrix4x4(m11: Single, m12: Single, m13: Single, m14: Single, m21: Single, m22: Single, m23: Single, m24: Single, m31: Single, m32: Single, m33: Single, m34: Single, m41: Single, m42: Single, m43: Single, m44: Single)
Matrix4x4(value: Matrix3x2)
"""
@staticmethod
def Add(value1, value2):
"""
Add(value1: Matrix4x4, value2: Matrix4x4) -> Matrix4x4
Adds each element in one matrix with its corresponding element in a second matrix.
value1: The first matrix.
value2: The second matrix.
Returns: The matrix that contains the summed values of value1value1 and value2value2.
"""
pass
@staticmethod
def CreateBillboard(objectPosition, cameraPosition, cameraUpVector, cameraForwardVector):
"""
CreateBillboard(objectPosition: Vector3, cameraPosition: Vector3, cameraUpVector: Vector3, cameraForwardVector: Vector3) -> Matrix4x4
Creates a spherical billboard that rotates around a specified object position.
objectPosition: The position of the object that the billboard will rotate around.
cameraPosition: The position of the camera.
cameraUpVector: The up vector of the camera.
cameraForwardVector: The forward vector of the camera.
Returns: The created billboard.
"""
pass
@staticmethod
def CreateConstrainedBillboard(objectPosition, cameraPosition, rotateAxis, cameraForwardVector, objectForwardVector):
"""
CreateConstrainedBillboard(objectPosition: Vector3, cameraPosition: Vector3, rotateAxis: Vector3, cameraForwardVector: Vector3, objectForwardVector: Vector3) -> Matrix4x4
Creates a cylindrical billboard that rotates around a specified axis.
objectPosition: The position of the object that the billboard will rotate around.
cameraPosition: The position of the camera.
rotateAxis: The axis to rotate the billboard around.
cameraForwardVector: The forward vector of the camera.
objectForwardVector: The forward vector of the object.
Returns: The billboard matrix.
"""
pass
@staticmethod
def CreateFromAxisAngle(axis, angle):
"""
CreateFromAxisAngle(axis: Vector3, angle: Single) -> Matrix4x4
Creates a matrix that rotates around an arbitrary vector.
axis: The axis to rotate around.
angle: The angle to rotate around axis, in radians.
Returns: The rotation matrix.
"""
pass
@staticmethod
def CreateFromQuaternion(quaternion):
"""
CreateFromQuaternion(quaternion: Quaternion) -> Matrix4x4
Creates a rotation matrix from the specified Quaternion rotation value.
quaternion: The source Quaternion.
Returns: The rotation matrix.
"""
pass
@staticmethod
def CreateFromYawPitchRoll(yaw, pitch, roll):
"""
CreateFromYawPitchRoll(yaw: Single, pitch: Single, roll: Single) -> Matrix4x4
Creates a rotation matrix from the specified yaw, pitch, and roll.
yaw: The angle of rotation, in radians, around the Y axis.
pitch: The angle of rotation, in radians, around the X axis.
roll: The angle of rotation, in radians, around the Z axis.
Returns: The rotation matrix.
"""
pass
@staticmethod
def CreateLookAt(cameraPosition, cameraTarget, cameraUpVector):
"""
CreateLookAt(cameraPosition: Vector3, cameraTarget: Vector3, cameraUpVector: Vector3) -> Matrix4x4
Creates a view matrix.
cameraPosition: The position of the camera.
cameraTarget: The target towards which the camera is pointing.
cameraUpVector: The direction that is "up" from the camera's point of view.
Returns: The view matrix.
"""
pass
@staticmethod
def CreateOrthographic(width, height, zNearPlane, zFarPlane):
"""
CreateOrthographic(width: Single, height: Single, zNearPlane: Single, zFarPlane: Single) -> Matrix4x4
Creates an orthographic perspective matrix from the given view volume dimensions.
width: The width of the view volume.
height: The height of the view volume.
zNearPlane: The minimum Z-value of the view volume.
zFarPlane: The maximum Z-value of the view volume.
Returns: The orthographic projection matrix.
"""
pass
@staticmethod
def CreateOrthographicOffCenter(left, right, bottom, top, zNearPlane, zFarPlane):
"""
CreateOrthographicOffCenter(left: Single, right: Single, bottom: Single, top: Single, zNearPlane: Single, zFarPlane: Single) -> Matrix4x4
Creates a customized orthographic projection matrix.
left: The minimum X-value of the view volume.
right: The maximum X-value of the view volume.
bottom: The minimum Y-value of the view volume.
top: The maximum Y-value of the view volume.
zNearPlane: The minimum Z-value of the view volume.
zFarPlane: The maximum Z-value of the view volume.
Returns: The orthographic projection matrix.
"""
pass
@staticmethod
def CreatePerspective(width, height, nearPlaneDistance, farPlaneDistance):
"""
CreatePerspective(width: Single, height: Single, nearPlaneDistance: Single, farPlaneDistance: Single) -> Matrix4x4
Creates a perspective projection matrix from the given view volume dimensions.
width: The width of the view volume at the near view plane.
height: The height of the view volume at the near view plane.
nearPlaneDistance: The distance to the near view plane.
farPlaneDistance: The distance to the far view plane.
Returns: The perspective projection matrix.
"""
pass
@staticmethod
def CreatePerspectiveFieldOfView(fieldOfView, aspectRatio, nearPlaneDistance, farPlaneDistance):
"""
CreatePerspectiveFieldOfView(fieldOfView: Single, aspectRatio: Single, nearPlaneDistance: Single, farPlaneDistance: Single) -> Matrix4x4
Creates a perspective projection matrix based on a field of view, aspect ratio, and near and far view plane distances.
fieldOfView: The field of view in the y direction, in radians.
aspectRatio: The aspect ratio, defined as view space width divided by height.
nearPlaneDistance: The distance to the near view plane.
farPlaneDistance: The distance to the far view plane.
Returns: The perspective projection matrix.
"""
pass
@staticmethod
def CreatePerspectiveOffCenter(left, right, bottom, top, nearPlaneDistance, farPlaneDistance):
"""
CreatePerspectiveOffCenter(left: Single, right: Single, bottom: Single, top: Single, nearPlaneDistance: Single, farPlaneDistance: Single) -> Matrix4x4
Creates a customized perspective projection matrix.
left: The minimum x-value of the view volume at the near view plane.
right: The maximum x-value of the view volume at the near view plane.
bottom: The minimum y-value of the view volume at the near view plane.
top: The maximum y-value of the view volume at the near view plane.
nearPlaneDistance: The distance to the near view plane.
farPlaneDistance: The distance to the far view plane.
Returns: The perspective projection matrix.
"""
pass
@staticmethod
def CreateReflection(value):
"""
CreateReflection(value: Plane) -> Matrix4x4
Creates a matrix that reflects the coordinate system about a specified plane.
value: The plane about which to create a reflection.
Returns: A new matrix expressing the reflection.
"""
pass
@staticmethod
def CreateRotationX(radians, centerPoint=None):
"""
CreateRotationX(radians: Single) -> Matrix4x4
Creates a matrix for rotating points around the X axis.
radians: The amount, in radians, by which to rotate around the X axis.
Returns: The rotation matrix.
CreateRotationX(radians: Single, centerPoint: Vector3) -> Matrix4x4
Creates a matrix for rotating points around the X axis from a center point.
radians: The amount, in radians, by which to rotate around the X axis.
centerPoint: The center point.
Returns: The rotation matrix.
"""
pass
@staticmethod
def CreateRotationY(radians, centerPoint=None):
"""
CreateRotationY(radians: Single) -> Matrix4x4
Creates a matrix for rotating points around the Y axis.
radians: The amount, in radians, by which to rotate around the Y-axis.
Returns: The rotation matrix.
CreateRotationY(radians: Single, centerPoint: Vector3) -> Matrix4x4
The amount, in radians, by which to rotate around the Y axis from a center point.
radians: The amount, in radians, by which to rotate around the Y-axis.
centerPoint: The center point.
Returns: The rotation matrix.
"""
pass
@staticmethod
def CreateRotationZ(radians, centerPoint=None):
"""
CreateRotationZ(radians: Single) -> Matrix4x4
Creates a matrix for rotating points around the Z axis.
radians: The amount, in radians, by which to rotate around the Z-axis.
Returns: The rotation matrix.
CreateRotationZ(radians: Single, centerPoint: Vector3) -> Matrix4x4
Creates a matrix for rotating points around the Z axis from a center point.
radians: The amount, in radians, by which to rotate around the Z-axis.
centerPoint: The center point.
Returns: The rotation matrix.
"""
pass
@staticmethod
def CreateScale(*__args):
"""
CreateScale(xScale: Single, yScale: Single, zScale: Single) -> Matrix4x4
Creates a scaling matrix from the specified X, Y, and Z components.
xScale: The value to scale by on the X axis.
yScale: The value to scale by on the Y axis.
zScale: The value to scale by on the Z axis.
Returns: The scaling matrix.
CreateScale(xScale: Single, yScale: Single, zScale: Single, centerPoint: Vector3) -> Matrix4x4
Creates a scaling matrix that is offset by a given center point.
xScale: The value to scale by on the X axis.
yScale: The value to scale by on the Y axis.
zScale: The value to scale by on the Z axis.
centerPoint: The center point.
Returns: The scaling matrix.
CreateScale(scales: Vector3) -> Matrix4x4
Creates a scaling matrix from the specified vector scale.
scales: The scale to use.
Returns: The scaling matrix.
CreateScale(scales: Vector3, centerPoint: Vector3) -> Matrix4x4
Creates a scaling matrix with a center point.
scales: The vector that contains the amount to scale on each axis.
centerPoint: The center point.
Returns: The scaling matrix.
CreateScale(scale: Single) -> Matrix4x4
Creates a uniform scaling matrix that scale equally on each axis.
scale: The uniform scaling factor.
Returns: The scaling matrix.
CreateScale(scale: Single, centerPoint: Vector3) -> Matrix4x4
Creates a uniform scaling matrix that scales equally on each axis with a center point.
scale: The uniform scaling factor.
centerPoint: The center point.
Returns: The scaling matrix.
"""
pass
@staticmethod
def CreateShadow(lightDirection, plane):
"""
CreateShadow(lightDirection: Vector3, plane: Plane) -> Matrix4x4
Creates a matrix that flattens geometry into a specified plane as if casting a shadow from a specified light source.
lightDirection: The direction from which the light that will cast the shadow is coming.
plane: The plane onto which the new matrix should flatten geometry so as to cast a shadow.
Returns: A new matrix that can be used to flatten geometry onto the specified plane from the specified direction.
"""
pass
@staticmethod
def CreateTranslation(*__args):
"""
CreateTranslation(position: Vector3) -> Matrix4x4
Creates a translation matrix from the specified 3-dimensional vector.
position: The amount to translate in each axis.
Returns: The translation matrix.
CreateTranslation(xPosition: Single, yPosition: Single, zPosition: Single) -> Matrix4x4
Creates a translation matrix from the specified X, Y, and Z components.
xPosition: The amount to translate on the X axis.
yPosition: The amount to translate on the Y axis.
zPosition: The amount to translate on the Z axis.
Returns: The translation matrix.
"""
pass
@staticmethod
def CreateWorld(position, forward, up):
"""
CreateWorld(position: Vector3, forward: Vector3, up: Vector3) -> Matrix4x4
Creates a world matrix with the specified parameters.
position: The position of the object.
forward: The forward direction of the object.
up: The upward direction of the object. Its value is usually [0, 1, 0].
Returns: The world matrix.
"""
pass
@staticmethod
def Decompose(matrix, scale, rotation, translation):
"""
Decompose(matrix: Matrix4x4) -> (bool, Vector3, Quaternion, Vector3)
Attempts to extract the scale, translation, and rotation components from the given scale, rotation, or translation matrix. The return value indicates whether the operation succeeded.
matrix: The source matrix.
Returns: true if matrixmatrix was decomposed successfully; otherwise, false.
"""
pass
def Equals(self, *__args):
"""
Equals(self: Matrix4x4, other: Matrix4x4) -> bool
Returns a value that indicates whether this instance and another 4x4 matrix are equal.
other: The other matrix.
Returns: true if the two matrices are equal; otherwise, false.
Equals(self: Matrix4x4, obj: object) -> bool
Returns a value that indicates whether this instance and a specified object are equal.
obj: The object to compare with the current instance.
Returns: true if the current instance and objobj are equal; otherwise, false. If objobj is null, the method returns false.
"""
pass
def GetDeterminant(self):
"""
GetDeterminant(self: Matrix4x4) -> Single
Calculates the determinant of the current 4x4 matrix.
Returns: The determinant.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: Matrix4x4) -> int
Returns the hash code for this instance.
Returns: The hash code.
"""
pass
@staticmethod
def Invert(matrix, result):
"""
Invert(matrix: Matrix4x4) -> (bool, Matrix4x4)
Inverts the specified matrix. The return value indicates whether the operation succeeded.
matrix: The matrix to invert.
Returns: true if matrixmatrix was converted successfully; otherwise, false.
"""
pass
@staticmethod
def Lerp(matrix1, matrix2, amount):
"""
Lerp(matrix1: Matrix4x4, matrix2: Matrix4x4, amount: Single) -> Matrix4x4
Performs a linear interpolation from one matrix to a second matrix based on a value that specifies the weighting of the second matrix.
matrix1: The first matrix.
matrix2: The second matrix.
amount: The relative weighting of matrix2.
Returns: The interpolated matrix.
"""
pass
@staticmethod
def Multiply(value1, value2):
"""
Multiply(value1: Matrix4x4, value2: Matrix4x4) -> Matrix4x4
Returns the matrix that results from multiplying two matrices together.
value1: The first matrix.
value2: The second matrix.
Returns: The product matrix.
Multiply(value1: Matrix4x4, value2: Single) -> Matrix4x4
Returns the matrix that results from scaling all the elements of a specified matrix by a scalar factor.
value1: The matrix to scale.
value2: The scaling value to use.
Returns: The scaled matrix.
"""
pass
@staticmethod
def Negate(value):
"""
Negate(value: Matrix4x4) -> Matrix4x4
Negates the specified matrix by multiplying all its values by -1.
value: The matrix to negate.
Returns: The negated matrix.
"""
pass
@staticmethod
def Subtract(value1, value2):
"""
Subtract(value1: Matrix4x4, value2: Matrix4x4) -> Matrix4x4
Subtracts each element in a second matrix from its corresponding element in a first matrix.
value1: The first matrix.
value2: The second matrix.
Returns: The matrix containing the values that result from subtracting each element in value2value2 from its corresponding element in value1value1.
"""
pass
def ToString(self):
"""
ToString(self: Matrix4x4) -> str
Returns a string that represents this matrix.
Returns: The string representation of this matrix.
"""
pass
@staticmethod
def Transform(value, rotation):
"""
Transform(value: Matrix4x4, rotation: Quaternion) -> Matrix4x4
Transforms the specified matrix by applying the specified Quaternion rotation.
value: The matrix to transform.
rotation: The rotation t apply.
Returns: The transformed matrix.
"""
pass
@staticmethod
def Transpose(matrix):
"""
Transpose(matrix: Matrix4x4) -> Matrix4x4
Transposes the rows and columns of a matrix.
matrix: The matrix to transpose.
Returns: The transposed matrix.
"""
pass
def __add__(self, *args): #cannot find CLR method
""" x.__add__(y) <==> x+y """
pass
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==y """
        # Operator stub; dispatched to the CLR operator at runtime.
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __mul__(self, *args): #cannot find CLR method
        """ x.__mul__(y) <==> x*y """
        pass
    def __neg__(self, *args): #cannot find CLR method
        """ x.__neg__() <==> -x """
        # Operator stub; dispatched to the CLR operator at runtime.
        pass
    @staticmethod # known case of __new__
    def __new__(self, *__args):
        """
        __new__(cls: type, m11: Single, m12: Single, m13: Single, m14: Single, m21: Single, m22: Single, m23: Single, m24: Single, m31: Single, m32: Single, m33: Single, m34: Single, m41: Single, m42: Single, m43: Single, m44: Single)
        __new__(cls: type, value: Matrix3x2)
        __new__[Matrix4x4]() -> Matrix4x4
        """
        # Constructor stub; the CLR value-type constructor does the real work.
        pass
    def __ne__(self, *args): #cannot find CLR method
        """ x.__ne__(y) <==> x!=y """
        pass
    def __radd__(self, *args): #cannot find CLR method
        """
        __radd__(value1: Matrix4x4, value2: Matrix4x4) -> Matrix4x4
        Adds each element in one matrix with its corresponding element in a second matrix.
        value1: The first matrix.
        value2: The second matrix.
        Returns: The matrix that contains the summed values.
        """
        # Reflected operator stub; dispatched to the CLR operator at runtime.
        pass
    def __repr__(self, *args): #cannot find CLR method
        """ __repr__(self: object) -> str """
        # Stub; representation is produced by the CLR/interop layer.
        pass
    def __rmul__(self, *args): #cannot find CLR method
        """
        __rmul__(value1: Matrix4x4, value2: Matrix4x4) -> Matrix4x4
        Returns the matrix that results from multiplying two matrices together.
        value1: The first matrix.
        value2: The second matrix.
        Returns: The product matrix.
        """
        # Reflected operator stub; dispatched to the CLR operator at runtime.
        pass
    def __rsub__(self, *args): #cannot find CLR method
        """
        __rsub__(value1: Matrix4x4, value2: Matrix4x4) -> Matrix4x4
        Subtracts each element in a second matrix from its corresponding element in a first matrix.
        value1: The first matrix.
        value2: The second matrix.
        Returns: The matrix containing the values that result from subtracting each element in value2 from its corresponding element in value1.
        """
        pass
    def __str__(self, *args): #cannot find CLR method
        """ x.__str__() <==> str(x) """
        pass
    def __sub__(self, *args): #cannot find CLR method
        """ x.__sub__(y) <==> x-y """
        # Operator stub; dispatched to the CLR operator at runtime.
        pass
IsIdentity = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Indicates whether the current matrix is the identity matrix.
Get: IsIdentity(self: Matrix4x4) -> bool
"""
Translation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Gets or sets the translation component of this matrix.
Get: Translation(self: Matrix4x4) -> Vector3
Set: Translation(self: Matrix4x4) = value
"""
Identity = None
M11 = None
M12 = None
M13 = None
M14 = None
M21 = None
M22 = None
M23 = None
M24 = None
M31 = None
M32 = None
M33 = None
M34 = None
M41 = None
M42 = None
M43 = None
M44 = None
class Plane(object, IEquatable[Plane]):
    """
    Represents a three-dimensional plane.
    Plane(x: Single, y: Single, z: Single, d: Single)
    Plane(normal: Vector3, d: Single)
    Plane(value: Vector4)
    """
    @staticmethod
    def CreateFromVertices(point1, point2, point3):
        """
        CreateFromVertices(point1: Vector3, point2: Vector3, point3: Vector3) -> Plane
        Creates a System.Numerics.Plane object that contains three specified points.
        point1: The first point defining the plane.
        point2: The second point defining the plane.
        point3: The third point defining the plane.
        Returns: The plane containing the three points.
        """
        pass
    @staticmethod
    def Dot(plane, value):
        """
        Dot(plane: Plane, value: Vector4) -> Single
        Calculates the dot product of a plane and a 4-dimensional vector.
        plane: The plane.
        value: The four-dimensional vector.
        Returns: The dot product.
        """
        pass
    @staticmethod
    def DotCoordinate(plane, value):
        """
        DotCoordinate(plane: Plane, value: Vector3) -> Single
        Returns the dot product of a specified three-dimensional vector and the normal vector of this plane plus the distance (System.Numerics.Plane.D) value of the plane.
        plane: The plane.
        value: The 3-dimensional vector.
        Returns: The dot product.
        """
        pass
    @staticmethod
    def DotNormal(plane, value):
        """
        DotNormal(plane: Plane, value: Vector3) -> Single
        Returns the dot product of a specified three-dimensional vector and the System.Numerics.Plane.Normal vector of this plane.
        plane: The plane.
        value: The three-dimensional vector.
        Returns: The dot product.
        """
        pass
    def Equals(self, *__args):
        """
        Equals(self: Plane, other: Plane) -> bool
        Returns a value that indicates whether this instance and another plane object are equal.
        other: The other plane.
        Returns: true if the two planes are equal; otherwise, false.
        Equals(self: Plane, obj: object) -> bool
        Returns a value that indicates whether this instance and a specified object are equal.
        obj: The object to compare with the current instance.
        Returns: true if the current instance and obj are equal; otherwise, false. If obj is null, the method returns false.
        """
        pass
    def GetHashCode(self):
        """
        GetHashCode(self: Plane) -> int
        Returns the hash code for this instance.
        Returns: The hash code.
        """
        pass
    @staticmethod
    def Normalize(value):
        """
        Normalize(value: Plane) -> Plane
        Creates a new System.Numerics.Plane object whose normal vector is the source plane's normal vector normalized.
        value: The source plane.
        Returns: The normalized plane.
        """
        pass
    def ToString(self):
        """
        ToString(self: Plane) -> str
        Returns the string representation of this plane object.
        Returns: A string that represents this System.Numerics.Plane object.
        """
        pass
    @staticmethod
    def Transform(plane, *__args):
        """
        Transform(plane: Plane, matrix: Matrix4x4) -> Plane
        Transforms a normalized plane by a 4x4 matrix.
        plane: The normalized plane to transform.
        matrix: The transformation matrix to apply to plane.
        Returns: The transformed plane.
        Transform(plane: Plane, rotation: Quaternion) -> Plane
        Transforms a normalized plane by a Quaternion rotation.
        plane: The normalized plane to transform.
        rotation: The Quaternion rotation to apply to the plane.
        Returns: A new plane that results from applying the Quaternion rotation.
        """
        pass
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==y """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    @staticmethod # known case of __new__
    def __new__(self, *__args):
        """
        __new__(cls: type, x: Single, y: Single, z: Single, d: Single)
        __new__(cls: type, normal: Vector3, d: Single)
        __new__(cls: type, value: Vector4)
        __new__[Plane]() -> Plane
        """
        pass
    def __ne__(self, *args): #cannot find CLR method
        """ x.__ne__(y) <==> x!=y """
        pass
    def __repr__(self, *args): #cannot find CLR method
        """ __repr__(self: object) -> str """
        pass
    def __str__(self, *args): #cannot find CLR method
        """ x.__str__() <==> str(x) """
        pass
    # NOTE(review): field placeholders (plane distance D and normal vector);
    # actual values presumably come from the underlying CLR type at runtime.
    D = None
    Normal = None
class Quaternion(object, IEquatable[Quaternion]):
    """
    Represents a vector that is used to encode three-dimensional physical rotations.
    Quaternion(x: Single, y: Single, z: Single, w: Single)
    Quaternion(vectorPart: Vector3, scalarPart: Single)
    """
    @staticmethod
    def Add(value1, value2):
        """
        Add(value1: Quaternion, value2: Quaternion) -> Quaternion
        Adds each element in one quaternion with its corresponding element in a second quaternion.
        value1: The first quaternion.
        value2: The second quaternion.
        Returns: The quaternion that contains the summed values of value1 and value2.
        """
        pass
    @staticmethod
    def Concatenate(value1, value2):
        """
        Concatenate(value1: Quaternion, value2: Quaternion) -> Quaternion
        Concatenates two quaternions.
        value1: The first quaternion rotation in the series.
        value2: The second quaternion rotation in the series.
        Returns: A new quaternion representing the concatenation of the value1 rotation followed by the value2 rotation.
        """
        pass
    @staticmethod
    def Conjugate(value):
        """
        Conjugate(value: Quaternion) -> Quaternion
        Returns the conjugate of a specified quaternion.
        value: The quaternion.
        Returns: A new quaternion that is the conjugate of value.
        """
        pass
    @staticmethod
    def CreateFromAxisAngle(axis, angle):
        """
        CreateFromAxisAngle(axis: Vector3, angle: Single) -> Quaternion
        Creates a quaternion from a vector and an angle to rotate about the vector.
        axis: The vector to rotate around.
        angle: The angle, in radians, to rotate around the vector.
        Returns: The newly created quaternion.
        """
        pass
    @staticmethod
    def CreateFromRotationMatrix(matrix):
        """
        CreateFromRotationMatrix(matrix: Matrix4x4) -> Quaternion
        Creates a quaternion from the specified rotation matrix.
        matrix: The rotation matrix.
        Returns: The newly created quaternion.
        """
        pass
    @staticmethod
    def CreateFromYawPitchRoll(yaw, pitch, roll):
        """
        CreateFromYawPitchRoll(yaw: Single, pitch: Single, roll: Single) -> Quaternion
        Creates a new quaternion from the given yaw, pitch, and roll.
        yaw: The yaw angle, in radians, around the Y axis.
        pitch: The pitch angle, in radians, around the X axis.
        roll: The roll angle, in radians, around the Z axis.
        Returns: The resulting quaternion.
        """
        pass
    @staticmethod
    def Divide(value1, value2):
        """
        Divide(value1: Quaternion, value2: Quaternion) -> Quaternion
        Divides one quaternion by a second quaternion.
        value1: The dividend.
        value2: The divisor.
        Returns: The quaternion that results from dividing value1 by value2.
        """
        pass
    @staticmethod
    def Dot(quaternion1, quaternion2):
        """
        Dot(quaternion1: Quaternion, quaternion2: Quaternion) -> Single
        Calculates the dot product of two quaternions.
        quaternion1: The first quaternion.
        quaternion2: The second quaternion.
        Returns: The dot product.
        """
        pass
    def Equals(self, *__args):
        """
        Equals(self: Quaternion, other: Quaternion) -> bool
        Returns a value that indicates whether this instance and another quaternion are equal.
        other: The other quaternion.
        Returns: true if the two quaternions are equal; otherwise, false.
        Equals(self: Quaternion, obj: object) -> bool
        Returns a value that indicates whether this instance and a specified object are equal.
        obj: The object to compare with the current instance.
        Returns: true if the current instance and obj are equal; otherwise, false. If obj is null, the method returns false.
        """
        pass
    def GetHashCode(self):
        """
        GetHashCode(self: Quaternion) -> int
        Returns the hash code for this instance.
        Returns: The hash code.
        """
        pass
    @staticmethod
    def Inverse(value):
        """
        Inverse(value: Quaternion) -> Quaternion
        Returns the inverse of a quaternion.
        value: The quaternion.
        Returns: The inverted quaternion.
        """
        pass
    def Length(self):
        """
        Length(self: Quaternion) -> Single
        Calculates the length of the quaternion.
        Returns: The computed length of the quaternion.
        """
        pass
    def LengthSquared(self):
        """
        LengthSquared(self: Quaternion) -> Single
        Calculates the squared length of the quaternion.
        Returns: The length squared of the quaternion.
        """
        pass
    @staticmethod
    def Lerp(quaternion1, quaternion2, amount):
        """
        Lerp(quaternion1: Quaternion, quaternion2: Quaternion, amount: Single) -> Quaternion
        Performs a linear interpolation between two quaternions based on a value that specifies the weighting of the second quaternion.
        quaternion1: The first quaternion.
        quaternion2: The second quaternion.
        amount: The relative weight of quaternion2 in the interpolation.
        Returns: The interpolated quaternion.
        """
        pass
    @staticmethod
    def Multiply(value1, value2):
        """
        Multiply(value1: Quaternion, value2: Quaternion) -> Quaternion
        Returns the quaternion that results from multiplying two quaternions together.
        value1: The first quaternion.
        value2: The second quaternion.
        Returns: The product quaternion.
        Multiply(value1: Quaternion, value2: Single) -> Quaternion
        Returns the quaternion that results from scaling all the components of a specified quaternion by a scalar factor.
        value1: The source quaternion.
        value2: The scalar value.
        Returns: The scaled quaternion.
        """
        pass
    @staticmethod
    def Negate(value):
        """
        Negate(value: Quaternion) -> Quaternion
        Reverses the sign of each component of the quaternion.
        value: The quaternion to negate.
        Returns: The negated quaternion.
        """
        pass
    @staticmethod
    def Normalize(value):
        """
        Normalize(value: Quaternion) -> Quaternion
        Divides each component of a specified System.Numerics.Quaternion by its length.
        value: The quaternion to normalize.
        Returns: The normalized quaternion.
        """
        pass
    @staticmethod
    def Slerp(quaternion1, quaternion2, amount):
        """
        Slerp(quaternion1: Quaternion, quaternion2: Quaternion, amount: Single) -> Quaternion
        Interpolates between two quaternions, using spherical linear interpolation.
        quaternion1: The first quaternion.
        quaternion2: The second quaternion.
        amount: The relative weight of the second quaternion in the interpolation.
        Returns: The interpolated quaternion.
        """
        pass
    @staticmethod
    def Subtract(value1, value2):
        """
        Subtract(value1: Quaternion, value2: Quaternion) -> Quaternion
        Subtracts each element in a second quaternion from its corresponding element in a first quaternion.
        value1: The first quaternion.
        value2: The second quaternion.
        Returns: The quaternion containing the values that result from subtracting each element in value2 from its corresponding element in value1.
        """
        pass
    def ToString(self):
        """
        ToString(self: Quaternion) -> str
        Returns a string that represents this quaternion.
        Returns: The string representation of this quaternion.
        """
        pass
    def __add__(self, *args): #cannot find CLR method
        """ x.__add__(y) <==> x+y """
        pass
    def __div__(self, *args): #cannot find CLR method
        """ x.__div__(y) <==> x/y """
        pass
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==y """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __mul__(self, *args): #cannot find CLR method
        """ x.__mul__(y) <==> x*y """
        pass
    def __neg__(self, *args): #cannot find CLR method
        """ x.__neg__() <==> -x """
        pass
    @staticmethod # known case of __new__
    def __new__(self, *__args):
        """
        __new__(cls: type, x: Single, y: Single, z: Single, w: Single)
        __new__(cls: type, vectorPart: Vector3, scalarPart: Single)
        __new__[Quaternion]() -> Quaternion
        """
        pass
    def __ne__(self, *args): #cannot find CLR method
        """ x.__ne__(y) <==> x!=y """
        pass
    def __radd__(self, *args): #cannot find CLR method
        """
        __radd__(value1: Quaternion, value2: Quaternion) -> Quaternion
        Adds each element in one quaternion with its corresponding element in a second quaternion.
        value1: The first quaternion.
        value2: The second quaternion.
        Returns: The quaternion that contains the summed values of value1 and value2.
        """
        pass
    def __rdiv__(self, *args): #cannot find CLR method
        """
        __rdiv__(value1: Quaternion, value2: Quaternion) -> Quaternion
        Divides one quaternion by a second quaternion.
        value1: The dividend.
        value2: The divisor.
        Returns: The quaternion that results from dividing value1 by value2.
        """
        pass
    def __repr__(self, *args): #cannot find CLR method
        """ __repr__(self: object) -> str """
        pass
    def __rmul__(self, *args): #cannot find CLR method
        """
        __rmul__(value1: Quaternion, value2: Quaternion) -> Quaternion
        Returns the quaternion that results from multiplying two quaternions together.
        value1: The first quaternion.
        value2: The second quaternion.
        Returns: The product quaternion.
        """
        pass
    def __rsub__(self, *args): #cannot find CLR method
        """
        __rsub__(value1: Quaternion, value2: Quaternion) -> Quaternion
        Subtracts each element in a second quaternion from its corresponding element in a first quaternion.
        value1: The first quaternion.
        value2: The second quaternion.
        Returns: The quaternion containing the values that result from subtracting each element in value2 from its corresponding element in value1.
        """
        pass
    def __str__(self, *args): #cannot find CLR method
        """ x.__str__() <==> str(x) """
        pass
    def __sub__(self, *args): #cannot find CLR method
        """ x.__sub__(y) <==> x-y """
        pass
    # Property placeholder: the lambda triple stands in for a CLR-backed getter.
    IsIdentity = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Gets a value that indicates whether the current instance is the identity quaternion.
    Get: IsIdentity(self: Quaternion) -> bool
    """
    # NOTE(review): field placeholders (Identity constant and the W/X/Y/Z
    # components); actual values presumably come from the underlying CLR type.
    Identity = None
    W = None
    X = None
    Y = None
    Z = None
class Vector2(object, IEquatable[Vector2], IFormattable):
    """
    Represents a vector with two single-precision floating-point values.
    Vector2(value: Single)
    Vector2(x: Single, y: Single)
    """
    @staticmethod
    def Abs(value):
        """
        Abs(value: Vector2) -> Vector2
        Returns a vector whose elements are the absolute values of each of the specified vector's elements.
        value: A vector.
        Returns: The absolute value vector.
        """
        pass
    @staticmethod
    def Add(left, right):
        """
        Add(left: Vector2, right: Vector2) -> Vector2
        Adds two vectors together.
        left: The first vector to add.
        right: The second vector to add.
        Returns: The summed vector.
        """
        pass
    @staticmethod
    def Clamp(value1, min, max):
        """
        Clamp(value1: Vector2, min: Vector2, max: Vector2) -> Vector2
        Restricts a vector between a minimum and a maximum value.
        value1: The vector to restrict.
        min: The minimum value.
        max: The maximum value.
        Returns: The restricted vector.
        """
        pass
    def CopyTo(self, array, index=None):
        """
        CopyTo(self: Vector2, array: Array[Single])
        Copies the elements of the vector to a specified array.
        array: The destination array.
        CopyTo(self: Vector2, array: Array[Single], index: int)
        Copies the elements of the vector to a specified array starting at a specified index position.
        array: The destination array.
        index: The index at which to copy the first element of the vector.
        """
        pass
    @staticmethod
    def Distance(value1, value2):
        """
        Distance(value1: Vector2, value2: Vector2) -> Single
        Computes the Euclidean distance between the two given points.
        value1: The first point.
        value2: The second point.
        Returns: The distance.
        """
        pass
    @staticmethod
    def DistanceSquared(value1, value2):
        """
        DistanceSquared(value1: Vector2, value2: Vector2) -> Single
        Returns the Euclidean distance squared between two specified points.
        value1: The first point.
        value2: The second point.
        Returns: The distance squared.
        """
        pass
    @staticmethod
    def Divide(left, *__args):
        """
        Divide(left: Vector2, right: Vector2) -> Vector2
        Divides the first vector by the second.
        left: The first vector.
        right: The second vector.
        Returns: The vector resulting from the division.
        Divide(left: Vector2, divisor: Single) -> Vector2
        Divides the specified vector by a specified scalar value.
        left: The vector.
        divisor: The scalar value.
        Returns: The vector that results from the division.
        """
        pass
    @staticmethod
    def Dot(value1, value2):
        """
        Dot(value1: Vector2, value2: Vector2) -> Single
        Returns the dot product of two vectors.
        value1: The first vector.
        value2: The second vector.
        Returns: The dot product.
        """
        pass
    def Equals(self, *__args):
        """
        Equals(self: Vector2, obj: object) -> bool
        Returns a value that indicates whether this instance and a specified object are equal.
        obj: The object to compare with the current instance.
        Returns: true if the current instance and obj are equal; otherwise, false. If obj is null, the method returns false.
        Equals(self: Vector2, other: Vector2) -> bool
        Returns a value that indicates whether this instance and another vector are equal.
        other: The other vector.
        Returns: true if the two vectors are equal; otherwise, false.
        """
        pass
    def GetHashCode(self):
        """
        GetHashCode(self: Vector2) -> int
        Returns the hash code for this instance.
        Returns: The hash code.
        """
        pass
    def Length(self):
        """
        Length(self: Vector2) -> Single
        Returns the length of the vector.
        Returns: The vector's length.
        """
        pass
    def LengthSquared(self):
        """
        LengthSquared(self: Vector2) -> Single
        Returns the length of the vector squared.
        Returns: The vector's length squared.
        """
        pass
    @staticmethod
    def Lerp(value1, value2, amount):
        """
        Lerp(value1: Vector2, value2: Vector2, amount: Single) -> Vector2
        Performs a linear interpolation between two vectors based on the given weighting.
        value1: The first vector.
        value2: The second vector.
        amount: A value between 0 and 1 that indicates the weight of value2.
        Returns: The interpolated vector.
        """
        pass
    @staticmethod
    def Max(value1, value2):
        """
        Max(value1: Vector2, value2: Vector2) -> Vector2
        Returns a vector whose elements are the maximum of each of the pairs of elements in two specified vectors.
        value1: The first vector.
        value2: The second vector.
        Returns: The maximized vector.
        """
        pass
    @staticmethod
    def Min(value1, value2):
        """
        Min(value1: Vector2, value2: Vector2) -> Vector2
        Returns a vector whose elements are the minimum of each of the pairs of elements in two specified vectors.
        value1: The first vector.
        value2: The second vector.
        Returns: The minimized vector.
        """
        pass
    @staticmethod
    def Multiply(left, right):
        """
        Multiply(left: Vector2, right: Vector2) -> Vector2
        Multiplies two vectors together.
        left: The first vector.
        right: The second vector.
        Returns: The product vector.
        Multiply(left: Vector2, right: Single) -> Vector2
        Multiplies a vector by a specified scalar.
        left: The vector to multiply.
        right: The scalar value.
        Returns: The scaled vector.
        Multiply(left: Single, right: Vector2) -> Vector2
        Multiplies a scalar value by a specified vector.
        left: The scaled value.
        right: The vector.
        Returns: The scaled vector.
        """
        pass
    @staticmethod
    def Negate(value):
        """
        Negate(value: Vector2) -> Vector2
        Negates a specified vector.
        value: The vector to negate.
        Returns: The negated vector.
        """
        pass
    @staticmethod
    def Normalize(value):
        """
        Normalize(value: Vector2) -> Vector2
        Returns a vector with the same direction as the specified vector, but with a length of one.
        value: The vector to normalize.
        Returns: The normalized vector.
        """
        pass
    @staticmethod
    def Reflect(vector, normal):
        """
        Reflect(vector: Vector2, normal: Vector2) -> Vector2
        Returns the reflection of a vector off a surface that has the specified normal.
        vector: The source vector.
        normal: The normal of the surface being reflected off.
        Returns: The reflected vector.
        """
        pass
    @staticmethod
    def SquareRoot(value):
        """
        SquareRoot(value: Vector2) -> Vector2
        Returns a vector whose elements are the square root of each of a specified vector's elements.
        value: A vector.
        Returns: The square root vector.
        """
        pass
    @staticmethod
    def Subtract(left, right):
        """
        Subtract(left: Vector2, right: Vector2) -> Vector2
        Subtracts the second vector from the first.
        left: The first vector.
        right: The second vector.
        Returns: The difference vector.
        """
        pass
    def ToString(self, format=None, formatProvider=None):
        """
        ToString(self: Vector2) -> str
        Returns the string representation of the current instance using default formatting.
        Returns: The string representation of the current instance.
        ToString(self: Vector2, format: str) -> str
        Returns the string representation of the current instance using the specified format string to format individual elements.
        format: A standard or custom numeric format string that defines the format of individual elements.
        Returns: The string representation of the current instance.
        ToString(self: Vector2, format: str, formatProvider: IFormatProvider) -> str
        Returns the string representation of the current instance using the specified format string to format individual elements and the specified format provider to define culture-specific
        formatting.
        format: A standard or custom numeric format string that defines the format of individual elements.
        formatProvider: A format provider that supplies culture-specific formatting information.
        Returns: The string representation of the current instance.
        """
        pass
    @staticmethod
    def Transform(*__args):
        """
        Transform(position: Vector2, matrix: Matrix3x2) -> Vector2
        Transforms a vector by a specified 3x2 matrix.
        position: The vector to transform.
        matrix: The transformation matrix.
        Returns: The transformed vector.
        Transform(position: Vector2, matrix: Matrix4x4) -> Vector2
        Transforms a vector by a specified 4x4 matrix.
        position: The vector to transform.
        matrix: The transformation matrix.
        Returns: The transformed vector.
        Transform(value: Vector2, rotation: Quaternion) -> Vector2
        Transforms a vector by the specified Quaternion rotation value.
        value: The vector to rotate.
        rotation: The rotation to apply.
        Returns: The transformed vector.
        """
        pass
    @staticmethod
    def TransformNormal(normal, matrix):
        """
        TransformNormal(normal: Vector2, matrix: Matrix3x2) -> Vector2
        Transforms a vector normal by the given 3x2 matrix.
        normal: The source vector.
        matrix: The matrix.
        Returns: The transformed vector.
        TransformNormal(normal: Vector2, matrix: Matrix4x4) -> Vector2
        Transforms a vector normal by the given 4x4 matrix.
        normal: The source vector.
        matrix: The matrix.
        Returns: The transformed vector.
        """
        pass
    def __abs__(self, *args): #cannot find CLR method
        """ x.__abs__() <==> abs(x) """
        pass
    def __add__(self, *args): #cannot find CLR method
        """ x.__add__(y) <==> x+y """
        pass
    def __div__(self, *args): #cannot find CLR method
        """ x.__div__(y) <==> x/y """
        pass
    def __eq__(self, *args): #cannot find CLR method
        """ x.__eq__(y) <==> x==y """
        pass
    def __format__(self, *args): #cannot find CLR method
        """ __format__(formattable: IFormattable, format: str) -> str """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __mul__(self, *args): #cannot find CLR method
        """ x.__mul__(y) <==> x*y """
        pass
    def __neg__(self, *args): #cannot find CLR method
        """ x.__neg__() <==> -x """
        pass
    @staticmethod # known case of __new__
    def __new__(self, *__args):
        """
        __new__(cls: type, value: Single)
        __new__(cls: type, x: Single, y: Single)
        __new__[Vector2]() -> Vector2
        """
        pass
    def __ne__(self, *args): #cannot find CLR method
        """ x.__ne__(y) <==> x!=y """
        pass
    def __radd__(self, *args): #cannot find CLR method
        """
        __radd__(left: Vector2, right: Vector2) -> Vector2
        Adds two vectors together.
        left: The first vector to add.
        right: The second vector to add.
        Returns: The summed vector.
        """
        pass
    def __rdiv__(self, *args): #cannot find CLR method
        """
        __rdiv__(left: Vector2, right: Vector2) -> Vector2
        Divides the first vector by the second.
        left: The first vector.
        right: The second vector.
        Returns: The vector that results from dividing left by right.
        """
        pass
    def __repr__(self, *args): #cannot find CLR method
        """ __repr__(self: object) -> str """
        pass
    def __rmul__(self, *args): #cannot find CLR method
        """
        __rmul__(left: Vector2, right: Vector2) -> Vector2
        Multiplies two vectors together.
        left: The first vector.
        right: The second vector.
        Returns: The product vector.
        __rmul__(left: Single, right: Vector2) -> Vector2
        Multiplies the scalar value by the specified vector.
        left: The scalar value.
        right: The vector.
        Returns: The scaled vector.
        """
        pass
    def __rsub__(self, *args): #cannot find CLR method
        """
        __rsub__(left: Vector2, right: Vector2) -> Vector2
        Subtracts the second vector from the first.
        left: The first vector.
        right: The second vector.
        Returns: The vector that results from subtracting right from left.
        """
        pass
    def __str__(self, *args): #cannot find CLR method
        """ x.__str__() <==> str(x) """
        pass
    def __sub__(self, *args): #cannot find CLR method
        """ x.__sub__(y) <==> x-y """
        pass
    # NOTE(review): field placeholders (One/UnitX/UnitY/Zero constants and the
    # X/Y components); actual values presumably come from the underlying CLR type.
    One = None
    UnitX = None
    UnitY = None
    X = None
    Y = None
    Zero = None
class Vector3(object, IEquatable[Vector3], IFormattable):
"""
Represents a vector with three single-precision floating-point values.
Vector3(value: Single)
Vector3(value: Vector2, z: Single)
Vector3(x: Single, y: Single, z: Single)
"""
@staticmethod
def Abs(value):
"""
Abs(value: Vector3) -> Vector3
Returns a vector whose elements are the absolute values of each of the specified vector's elements.
value: A vector.
Returns: The absolute value vector.
"""
pass
@staticmethod
def Add(left, right):
"""
Add(left: Vector3, right: Vector3) -> Vector3
Adds two vectors together.
left: The first vector to add.
right: The second vector to add.
Returns: The summed vector.
"""
pass
@staticmethod
def Clamp(value1, min, max):
"""
Clamp(value1: Vector3, min: Vector3, max: Vector3) -> Vector3
Restricts a vector between a minimum and a maximum value.
value1: The vector to restrict.
min: The minimum value.
max: The maximum value.
Returns: The restricted vector.
"""
pass
def CopyTo(self, array, index=None):
"""
CopyTo(self: Vector3, array: Array[Single])
Copies the elements of the vector to a specified array.
array: The destination array.
CopyTo(self: Vector3, array: Array[Single], index: int)
Copies the elements of the vector to a specified array starting at a specified index position.
array: The destination array.
index: The index at which to copy the first element of the vector.
"""
pass
@staticmethod
def Cross(vector1, vector2):
"""
Cross(vector1: Vector3, vector2: Vector3) -> Vector3
Computes the cross product of two vectors.
vector1: The first vector.
vector2: The second vector.
Returns: The cross product.
"""
pass
@staticmethod
def Distance(value1, value2):
"""
Distance(value1: Vector3, value2: Vector3) -> Single
Computes the Euclidean distance between the two given points.
value1: The first point.
value2: The second point.
Returns: The distance.
"""
pass
@staticmethod
def DistanceSquared(value1, value2):
"""
DistanceSquared(value1: Vector3, value2: Vector3) -> Single
Returns the Euclidean distance squared between two specified points.
value1: The first point.
value2: The second point.
Returns: The distance squared.
"""
pass
@staticmethod
def Divide(left, *__args):
"""
Divide(left: Vector3, right: Vector3) -> Vector3
Divides the first vector by the second.
left: The first vector.
right: The second vector.
Returns: The vector resulting from the division.
Divide(left: Vector3, divisor: Single) -> Vector3
Divides the specified vector by a specified scalar value.
left: The vector.
divisor: The scalar value.
Returns: The vector that results from the division.
"""
pass
@staticmethod
def Dot(vector1, vector2):
"""
Dot(vector1: Vector3, vector2: Vector3) -> Single
Returns the dot product of two vectors.
vector1: The first vector.
vector2: The second vector.
Returns: The dot product.
"""
pass
def Equals(self, *__args):
"""
Equals(self: Vector3, obj: object) -> bool
Returns a value that indicates whether this instance and a specified object are equal.
obj: The object to compare with the current instance.
Returns: true if the current instance and objobj are equal; otherwise, false. If objobj is null, the method returns false.
Equals(self: Vector3, other: Vector3) -> bool
Returns a value that indicates whether this instance and another vector are equal.
other: The other vector.
Returns: true if the two vectors are equal; otherwise, false.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: Vector3) -> int
Returns the hash code for this instance.
Returns: The hash code.
"""
pass
def Length(self):
"""
Length(self: Vector3) -> Single
Returns the length of this vector object.
Returns: The vector's length.
"""
pass
def LengthSquared(self):
"""
LengthSquared(self: Vector3) -> Single
Returns the length of the vector squared.
Returns: The vector's length squared.
"""
pass
@staticmethod
def Lerp(value1, value2, amount):
"""
Lerp(value1: Vector3, value2: Vector3, amount: Single) -> Vector3
Performs a linear interpolation between two vectors based on the given weighting.
value1: The first vector.
value2: The second vector.
amount: A value between 0 and 1 that indicates the weight of value2.
Returns: The interpolated vector.
"""
pass
@staticmethod
def Max(value1, value2):
"""
Max(value1: Vector3, value2: Vector3) -> Vector3
Returns a vector whose elements are the maximum of each of the pairs of elements in two specified vectors.
value1: The first vector.
value2: The second vector.
Returns: The maximized vector.
"""
pass
@staticmethod
def Min(value1, value2):
"""
Min(value1: Vector3, value2: Vector3) -> Vector3
Returns a vector whose elements are the minimum of each of the pairs of elements in two specified vectors.
value1: The first vector.
value2: The second vector.
Returns: The minimized vector.
"""
pass
@staticmethod
def Multiply(left, right):
"""
Multiply(left: Vector3, right: Vector3) -> Vector3
Multiplies two vectors together.
left: The first vector.
right: The second vector.
Returns: The product vector.
Multiply(left: Vector3, right: Single) -> Vector3
Multiplies a vector by a specified scalar.
left: The vector to multiply.
right: The scalar value.
Returns: The scaled vector.
Multiply(left: Single, right: Vector3) -> Vector3
Multiplies a scalar value by a specified vector.
left: The scaled value.
right: The vector.
Returns: The scaled vector.
"""
pass
@staticmethod
def Negate(value):
"""
Negate(value: Vector3) -> Vector3
Negates a specified vector.
value: The vector to negate.
Returns: The negated vector.
"""
pass
@staticmethod
def Normalize(value):
"""
Normalize(value: Vector3) -> Vector3
Returns a vector with the same direction as the specified vector, but with a length of one.
value: The vector to normalize.
Returns: The normalized vector.
"""
pass
@staticmethod
def Reflect(vector, normal):
"""
Reflect(vector: Vector3, normal: Vector3) -> Vector3
Returns the reflection of a vector off a surface that has the specified normal.
vector: The source vector.
normal: The normal of the surface being reflected off.
Returns: The reflected vector.
"""
pass
@staticmethod
def SquareRoot(value):
"""
SquareRoot(value: Vector3) -> Vector3
Returns a vector whose elements are the square root of each of a specified vector's elements.
value: A vector.
Returns: The square root vector.
"""
pass
@staticmethod
def Subtract(left, right):
"""
Subtract(left: Vector3, right: Vector3) -> Vector3
Subtracts the second vector from the first.
left: The first vector.
right: The second vector.
Returns: The difference vector.
"""
pass
def ToString(self, format=None, formatProvider=None):
"""
ToString(self: Vector3) -> str
Returns the string representation of the current instance using default formatting.
Returns: The string representation of the current instance.
ToString(self: Vector3, format: str) -> str
Returns the string representation of the current instance using the specified format string to format individual elements.
format: A or that defines the format of individual elements.
Returns: The string representation of the current instance.
ToString(self: Vector3, format: str, formatProvider: IFormatProvider) -> str
Returns the string representation of the current instance using the specified format string to format individual elements and the specified format provider to define culture-specific
formatting.
format: A or that defines the format of individual elements.
formatProvider: A format provider that supplies culture-specific formatting information.
Returns: The string representation of the current instance.
"""
pass
@staticmethod
def Transform(*__args):
"""
Transform(position: Vector3, matrix: Matrix4x4) -> Vector3
Transforms a vector by a specified 4x4 matrix.
position: The vector to transform.
matrix: The transformation matrix.
Returns: The transformed vector.
Transform(value: Vector3, rotation: Quaternion) -> Vector3
Transforms a vector by the specified Quaternion rotation value.
value: The vector to rotate.
rotation: The rotation to apply.
Returns: The transformed vector.
"""
pass
@staticmethod
def TransformNormal(normal, matrix):
"""
TransformNormal(normal: Vector3, matrix: Matrix4x4) -> Vector3
Transforms a vector normal by the given 4x4 matrix.
normal: The source vector.
matrix: The matrix.
Returns: The transformed vector.
"""
pass
def __abs__(self, *args): #cannot find CLR method
""" x.__abs__() <==> abs(x) """
pass
def __add__(self, *args): #cannot find CLR method
""" x.__add__(y) <==> x+y """
pass
def __div__(self, *args): #cannot find CLR method
""" x.__div__(y) <==> x/yx.__div__(y) <==> x/y """
pass
def __eq__(self, *args): #cannot find CLR method
""" x.__eq__(y) <==> x==y """
pass
def __format__(self, *args): #cannot find CLR method
""" __format__(formattable: IFormattable, format: str) -> str """
pass
def __init__(self, *args): #cannot find CLR method
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __mul__(self, *args): #cannot find CLR method
""" x.__mul__(y) <==> x*yx.__mul__(y) <==> x*y """
pass
def __neg__(self, *args): #cannot find CLR method
""" x.__neg__() <==> -x """
pass
@staticmethod # known case of __new__
def __new__(self, *__args):
"""
__new__(cls: type, value: Single)
__new__(cls: type, value: Vector2, z: Single)
__new__(cls: type, x: Single, y: Single, z: Single)
__new__[Vector3]() -> Vector3
"""
pass
def __ne__(self, *args): #cannot find CLR method
pass
def __radd__(self, *args): #cannot find CLR method
"""
__radd__(left: Vector3, right: Vector3) -> Vector3
Adds two vectors together.
left: The first vector to add.
right: The second vector to add.
Returns: The summed vector.
"""
pass
def __rdiv__(self, *args): #cannot find CLR method
"""
__rdiv__(left: Vector3, right: Vector3) -> Vector3
Divides the first vector by the second.
left: The first vector.
right: The second vector.
Returns: The vector that results from dividing leftleft by rightright.
"""
pass
def __repr__(self, *args): #cannot find CLR method
""" __repr__(self: object) -> str """
pass
def __rmul__(self, *args): #cannot find CLR method
"""
__rmul__(left: Vector3, right: Vector3) -> Vector3
Multiplies two vectors together.
left: The first vector.
right: The second vector.
Returns: The product vector.
__rmul__(left: Single, right: Vector3) -> Vector3
Multiples the scalar value by the specified vector.
left: The vector.
right: The scalar value.
Returns: The scaled vector.
"""
pass
def __rsub__(self, *args): #cannot find CLR method
"""
__rsub__(left: Vector3, right: Vector3) -> Vector3
Subtracts the second vector from the first.
left: The first vector.
right: The second vector.
Returns: The vector that results from subtracting rightright from leftleft.
"""
pass
def __str__(self, *args): #cannot find CLR method
pass
def __sub__(self, *args): #cannot find CLR method
""" x.__sub__(y) <==> x-y """
pass
One = None
UnitX = None
UnitY = None
UnitZ = None
X = None
Y = None
Z = None
Zero = None
class Vector4(object, IEquatable[Vector4], IFormattable):
"""
Represents a vector with four single-precision floating-point values.
Vector4(value: Single)
Vector4(x: Single, y: Single, z: Single, w: Single)
Vector4(value: Vector2, z: Single, w: Single)
Vector4(value: Vector3, w: Single)
"""
@staticmethod
def Abs(value):
"""
Abs(value: Vector4) -> Vector4
Returns a vector whose elements are the absolute values of each of the specified vector's elements.
value: A vector.
Returns: The absolute value vector.
"""
pass
@staticmethod
def Add(left, right):
"""
Add(left: Vector4, right: Vector4) -> Vector4
Adds two vectors together.
left: The first vector to add.
right: The second vector to add.
Returns: The summed vector.
"""
pass
@staticmethod
def Clamp(value1, min, max):
"""
Clamp(value1: Vector4, min: Vector4, max: Vector4) -> Vector4
Restricts a vector between a minimum and a maximum value.
value1: The vector to restrict.
min: The minimum value.
max: The maximum value.
Returns: The restricted vector.
"""
pass
def CopyTo(self, array, index=None):
"""
CopyTo(self: Vector4, array: Array[Single])
Copies the elements of the vector to a specified array.
array: The destination array.
CopyTo(self: Vector4, array: Array[Single], index: int)
Copies the elements of the vector to a specified array starting at a specified index position.
array: The destination array.
index: The index at which to copy the first element of the vector.
"""
pass
@staticmethod
def Distance(value1, value2):
"""
Distance(value1: Vector4, value2: Vector4) -> Single
Computes the Euclidean distance between the two given points.
value1: The first point.
value2: The second point.
Returns: The distance.
"""
pass
@staticmethod
def DistanceSquared(value1, value2):
"""
DistanceSquared(value1: Vector4, value2: Vector4) -> Single
Returns the Euclidean distance squared between two specified points.
value1: The first point.
value2: The second point.
Returns: The distance squared.
"""
pass
@staticmethod
def Divide(left, *__args):
"""
Divide(left: Vector4, right: Vector4) -> Vector4
Divides the first vector by the second.
left: The first vector.
right: The second vector.
Returns: The vector resulting from the division.
Divide(left: Vector4, divisor: Single) -> Vector4
Divides the specified vector by a specified scalar value.
left: The vector.
divisor: The scalar value.
Returns: The vector that results from the division.
"""
pass
@staticmethod
def Dot(vector1, vector2):
"""
Dot(vector1: Vector4, vector2: Vector4) -> Single
Returns the dot product of two vectors.
vector1: The first vector.
vector2: The second vector.
Returns: The dot product.
"""
pass
def Equals(self, *__args):
"""
Equals(self: Vector4, obj: object) -> bool
Returns a value that indicates whether this instance and a specified object are equal.
obj: The object to compare with the current instance.
Returns: true if the current instance and objobj are equal; otherwise, false. If objobj is null, the method returns false.
Equals(self: Vector4, other: Vector4) -> bool
Returns a value that indicates whether this instance and another vector are equal.
other: The other vector.
Returns: true if the two vectors are equal; otherwise, false.
"""
pass
def GetHashCode(self):
"""
GetHashCode(self: Vector4) -> int
Returns the hash code for this instance.
Returns: The hash code.
"""
pass
def Length(self):
"""
Length(self: Vector4) -> Single
Returns the length of this vector object.
Returns: The vector's length.
"""
pass
def LengthSquared(self):
"""
LengthSquared(self: Vector4) -> Single
Returns the length of the vector squared.
Returns: The vector's length squared.
"""
pass
@staticmethod
def Lerp(value1, value2, amount):
"""
Lerp(value1: Vector4, value2: Vector4, amount: Single) -> Vector4
Performs a linear interpolation between two vectors based on the given weighting.
value1: The first vector.
value2: The second vector.
amount: A value between 0 and 1 that indicates the weight of value2.
Returns: The interpolated vector.
"""
pass
@staticmethod
def Max(value1, value2):
"""
Max(value1: Vector4, value2: Vector4) -> Vector4
Returns a vector whose elements are the maximum of each of the pairs of elements in two specified vectors.
value1: The first vector.
value2: The second vector.
Returns: The maximized vector.
"""
pass
@staticmethod
def Min(value1, value2):
"""
Min(value1: Vector4, value2: Vector4) -> Vector4
Returns a vector whose elements are the minimum of each of the pairs of elements in two specified vectors.
value1: The first vector.
value2: The second vector.
Returns: The minimized vector.
"""
pass
@staticmethod
def Multiply(left, right):
"""
Multiply(left: Vector4, right: Vector4) -> Vector4
Multiplies two vectors together.
left: The first vector.
right: The second vector.
Returns: The product vector.
Multiply(left: Vector4, right: Single) -> Vector4
Multiplies a vector by a specified scalar.
left: The vector to multiply.
right: The scalar value.
Returns: The scaled vector.
Multiply(left: Single, right: Vector4) -> Vector4
Multiplies a scalar value by a specified vector.
left: The scaled value.
right: The vector.
Returns: The scaled vector.
"""
pass
@staticmethod
def Negate(value):
"""
Negate(value: Vector4) -> Vector4
Negates a specified vector.
value: The vector to negate.
Returns: The negated vector.
"""
pass
@staticmethod
def Normalize(vector):
"""
Normalize(vector: Vector4) -> Vector4
Returns a vector with the same direction as the specified vector, but with a length of one.
vector: The vector to normalize.
Returns: The normalized vector.
"""
pass
@staticmethod
def SquareRoot(value):
"""
SquareRoot(value: Vector4) -> Vector4
Returns a vector whose elements are the square root of each of a specified vector's elements.
value: A vector.
Returns: The square root vector.
"""
pass
@staticmethod
def Subtract(left, right):
"""
Subtract(left: Vector4, right: Vector4) -> Vector4
Subtracts the second vector from the first.
left: The first vector.
right: The second vector.
Returns: The difference vector.
"""
pass
def ToString(self, format=None, formatProvider=None):
"""
ToString(self: Vector4) -> str
Returns the string representation of the current instance using default formatting.
Returns: The string representation of the current instance.
ToString(self: Vector4, format: str) -> str
Returns the string representation of the current instance using the specified format string to format individual elements.
format: A or that defines the format of individual elements.
Returns: The string representation of the current instance.
ToString(self: Vector4, format: str, formatProvider: IFormatProvider) -> str
Returns the string representation of the current instance using the specified format string to format individual elements and the specified format provider to define culture-specific
formatting.
format: A or that defines the format of individual elements.
formatProvider: A format provider that supplies culture-specific formatting information.
Returns: The string representation of the current instance.
"""
pass
@staticmethod
def Transform(*__args):
"""
Transform(position: Vector2, matrix: Matrix4x4) -> Vector4
Transforms a two-dimensional vector by a specified 4x4 matrix.
position: The vector to transform.
matrix: The transformation matrix.
Returns: The transformed vector.
Transform(position: Vector3, matrix: Matrix4x4) -> Vector4
Transforms a three-dimensional vector by a specified 4x4 matrix.
position: The vector to transform.
matrix: The transformation matrix.
Returns: The transformed vector.
Transform(vector: Vector4, matrix: Matrix4x4) -> Vector4
Transforms a four-dimensional vector by a specified 4x4 matrix.
vector: The vector to transform.
matrix: The transformation matrix.
Returns: The transformed vector.
Transform(value: Vector2, rotation: Quaternion) -> Vector4
Transforms a two-dimensional vector by the specified Quaternion rotation value.
value: The vector to rotate.
rotation: The rotation to apply.
Returns: The transformed vector.
Transform(value: Vector3, rotation: Quaternion) -> Vector4
Transforms a three-dimensional vector by the specified Quaternion rotation value.
value: The vector to rotate.
rotation: The rotation to apply.
Returns: The transformed vector.
Transform(value: Vector4, rotation: Quaternion) -> Vector4
Transforms a four-dimensional vector by the specified Quaternion rotation value.
value: The vector to rotate.
rotation: The rotation to apply.
Returns: The transformed vector.
"""
pass
def __abs__(self, *args): #cannot find CLR method
""" x.__abs__() <==> abs(x) """
pass
def __add__(self, *args): #cannot find CLR method
""" x.__add__(y) <==> x+y """
pass
def __div__(self, *args): #cannot find CLR method
""" x.__div__(y) <==> x/yx.__div__(y) <==> x/y """
pass
def __eq__(self, *args): #cannot find CLR method
""" x.__eq__(y) <==> x==y """
pass
def __format__(self, *args): #cannot find CLR method
""" __format__(formattable: IFormattable, format: str) -> str """
pass
def __init__(self, *args): #cannot find CLR method
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __mul__(self, *args): #cannot find CLR method
""" x.__mul__(y) <==> x*yx.__mul__(y) <==> x*y """
pass
def __neg__(self, *args): #cannot find CLR method
""" x.__neg__() <==> -x """
pass
@staticmethod # known case of __new__
def __new__(self, *__args):
"""
__new__(cls: type, value: Single)
__new__(cls: type, x: Single, y: Single, z: Single, w: Single)
__new__(cls: type, value: Vector2, z: Single, w: Single)
__new__(cls: type, value: Vector3, w: Single)
__new__[Vector4]() -> Vector4
"""
pass
def __ne__(self, *args): #cannot find CLR method
pass
def __radd__(self, *args): #cannot find CLR method
"""
__radd__(left: Vector4, right: Vector4) -> Vector4
Adds two vectors together.
left: The first vector to add.
right: The second vector to add.
Returns: The summed vector.
"""
pass
def __rdiv__(self, *args): #cannot find CLR method
"""
__rdiv__(left: Vector4, right: Vector4) -> Vector4
Divides the first vector by the second.
left: The first vector.
right: The second vector.
Returns: The vector that results from dividing leftleft by rightright.
"""
pass
def __repr__(self, *args): #cannot find CLR method
""" __repr__(self: object) -> str """
pass
def __rmul__(self, *args): #cannot find CLR method
"""
__rmul__(left: Vector4, right: Vector4) -> Vector4
Multiplies two vectors together.
left: The first vector.
right: The second vector.
Returns: The product vector.
__rmul__(left: Single, right: Vector4) -> Vector4
Multiples the scalar value by the specified vector.
left: The vector.
right: The scalar value.
Returns: The scaled vector.
"""
pass
def __rsub__(self, *args): #cannot find CLR method
"""
__rsub__(left: Vector4, right: Vector4) -> Vector4
Subtracts the second vector from the first.
left: The first vector.
right: The second vector.
Returns: The vector that results from subtracting rightright from leftleft.
"""
pass
def __str__(self, *args): #cannot find CLR method
pass
def __sub__(self, *args): #cannot find CLR method
""" x.__sub__(y) <==> x-y """
pass
One = None
UnitW = None
UnitX = None
UnitY = None
UnitZ = None
W = None
X = None
Y = None
Z = None
Zero = None
| 34.694737
| 236
| 0.557695
| 10,383
| 102,176
| 5.377925
| 0.048156
| 0.04316
| 0.035728
| 0.029334
| 0.833844
| 0.803918
| 0.780816
| 0.744927
| 0.715701
| 0.702967
| 0
| 0.018382
| 0.371731
| 102,176
| 2,944
| 237
| 34.706522
| 0.851466
| 0.63971
| 0
| 0.914201
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.362426
| false
| 0.362426
| 0
| 0
| 0.461538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
0609714bb2135e06b5df6565610a52796ccafde7
| 13,409
|
py
|
Python
|
APY/code/datagen.py
|
mvp18/gAL-MELEX
|
58fd26b41d40acac3ab98f37c34e4de2d757ac01
|
[
"MIT"
] | null | null | null |
APY/code/datagen.py
|
mvp18/gAL-MELEX
|
58fd26b41d40acac3ab98f37c34e4de2d757ac01
|
[
"MIT"
] | null | null | null |
APY/code/datagen.py
|
mvp18/gAL-MELEX
|
58fd26b41d40acac3ab98f37c34e4de2d757ac01
|
[
"MIT"
] | null | null | null |
import numpy as np
import torch
from torch.utils import data
from scipy import io
from collections import defaultdict
import pickle
def load_data():
res101 = io.loadmat('../../xlsa17/data/APY/res101.mat')
att_splits = io.loadmat('../att_splits.mat')
prior_matrix = att_splits['att']
allclass_names=att_splits['allclasses_names']
train_loc = 'train_loc'
val_loc = 'val_loc'
test_loc = 'test_unseen_loc'
X = res101['features']
X = X.transpose()
train_X = X[np.squeeze(att_splits[train_loc]-1)]
val_X = X[np.squeeze(att_splits[val_loc]-1)]
test_X = X[np.squeeze(att_splits[test_loc]-1)]
all_classes = res101['labels']
train_classes = np.squeeze(all_classes[np.squeeze(att_splits[train_loc]-1)])
val_classes = np.squeeze(all_classes[np.squeeze(att_splits[val_loc]-1)])
test_classes = np.squeeze(all_classes[np.squeeze(att_splits[test_loc]-1)])
prior_matrix = att_splits['att']
prior_matrix_tr = prior_matrix[:,(np.unique(train_classes)-1)]
prior_matrix_val = prior_matrix[:,(np.unique(val_classes)-1)]
prior_matrix_ts = prior_matrix[:,(np.unique(test_classes)-1)]
train_img=res101['image_files'][np.squeeze(att_splits[train_loc]-1)]
val_img=res101['image_files'][np.squeeze(att_splits[val_loc]-1)]
test_img=res101['image_files'][np.squeeze(att_splits[test_loc]-1)]
train_img_names=[]
val_img_names=[]
test_img_names=[]
for i in range(train_img.shape[0]):
train_img_names.append(train_img[i][0][0].split('ages/')[1]+'+'+allclass_names[train_classes[i]-1][0][0])
for i in range(test_img.shape[0]):
test_img_names.append(test_img[i][0][0].split('ages/')[1]+'+'+allclass_names[test_classes[i]-1][0][0])
for i in range(val_img.shape[0]):
val_img_names.append(val_img[i][0][0].split('ages/')[1]+'+'+allclass_names[val_classes[i]-1][0][0])
all_img_names=train_img_names+val_img_names+test_img_names
rept_img_dict={}
for i in range(len(all_img_names)):
rept_img_dict[all_img_names[i]]=0
img2att={}
apascal_train=open('../attribute_data/apascal_train.txt').readlines()
apascal_test=open('../attribute_data/apascal_test.txt').readlines()
ayahoo_test=open('../attribute_data/ayahoo_test.txt').readlines()
for i in range(len(apascal_train)):
img_name=apascal_train[i].split(' ')[0]+'+'+apascal_train[i].split(' ')[1]
if all_img_names.count(img_name)>1:
suffix=str(rept_img_dict[img_name]+1)
rept_img_dict[img_name]+=1
else:
suffix='1'
bin_att=[]
for j in range(64):
bin_att.append(float(apascal_train[i].split(' ')[j+6]))
img2att[img_name+'_'+suffix]=np.array(bin_att)
for a in range(len(apascal_test)):
img_name=apascal_test[a].split(' ')[0]+'+'+apascal_test[a].split(' ')[1]
if all_img_names.count(img_name)>1:
suffix=str(rept_img_dict[img_name]+1)
rept_img_dict[img_name]+=1
else:
suffix='1'
bin_att=[]
for b in range(64):
bin_att.append(float(apascal_test[a].split(' ')[b+6]))
img2att[img_name+'_'+suffix]=np.array(bin_att)
for x in range(len(ayahoo_test)):
img_name=ayahoo_test[x].split(' ')[0]+'+'+ayahoo_test[x].split(' ')[1]
if all_img_names.count(img_name)>1:
suffix=str(rept_img_dict[img_name]+1)
rept_img_dict[img_name]+=1
else:
suffix='1'
bin_att=[]
for y in range(64):
bin_att.append(float(ayahoo_test[x].split(' ')[y+6]))
img2att[img_name+'_'+suffix]=np.array(bin_att)
train_att={}
for img_name in set(train_img_names):
if rept_img_dict[img_name]!=0:
for j in range(rept_img_dict[img_name]):
if j:
temp_arr=np.vstack([temp_arr, np.expand_dims(img2att[img_name+'_'+str(j+1)], 0)])
else:
temp_arr=np.expand_dims(img2att[img_name+'_'+str(j+1)], 0)
train_att[img_name]=np.round(np.mean(temp_arr,0))
else:
train_att[img_name]=img2att[img_name+'_1']
val_att={}
for img_name in set(val_img_names):
if rept_img_dict[img_name]!=0:
for j in range(rept_img_dict[img_name]):
if j:
temp_arr=np.vstack([temp_arr, np.expand_dims(img2att[img_name+'_'+str(j+1)], 0)])
else:
temp_arr=np.expand_dims(img2att[img_name+'_'+str(j+1)], 0)
val_att[img_name]=np.round(np.mean(temp_arr,0))
else:
val_att[img_name]=img2att[img_name+'_1']
test_att={}
for img_name in set(test_img_names):
if rept_img_dict[img_name]!=0:
for j in range(rept_img_dict[img_name]):
if j:
temp_arr=np.vstack([temp_arr, np.expand_dims(img2att[img_name+'_'+str(j+1)], 0)])
else:
temp_arr=np.expand_dims(img2att[img_name+'_'+str(j+1)], 0)
test_att[img_name]=np.round(np.mean(temp_arr,0))
else:
test_att[img_name]=img2att[img_name+'_1']
img_name_list=[train_img_names, val_img_names, test_img_names]
feature_list = [train_X, val_X, test_X]
class_list = [train_classes, val_classes, test_classes]
signature_list = [prior_matrix_tr, prior_matrix_val, prior_matrix_ts]
att_list=[train_att, val_att, test_att]
return img_name_list, class_list, signature_list, feature_list, att_list, img2att, rept_img_dict
def load_data_custom_split(fp_class_splits):
cls_splits = np.load(fp_class_splits, allow_pickle=True).item()
res101 = io.loadmat('../../xlsa17/data/APY/res101.mat')
att_splits = io.loadmat('../att_splits.mat')
prior_matrix = att_splits['att']
all_classes = res101['labels']
allclass_names=att_splits['allclasses_names']
train_loc = []
val_loc = []
test_loc = []
for i, label in enumerate(all_classes):
if allclass_names[label-1] in cls_splits['train_cls']:
train_loc.append(i)
elif allclass_names[label-1] in cls_splits['val_cls']:
val_loc.append(i)
elif allclass_names[label-1] in cls_splits['test_cls']:
test_loc.append(i)
X = res101['features']
X = X.transpose()
train_X = X[np.squeeze(train_loc)]
val_X = X[np.squeeze(val_loc)]
test_X = X[np.squeeze(test_loc)]
train_classes = np.squeeze(all_classes[np.squeeze(train_loc)])
val_classes = np.squeeze(all_classes[np.squeeze(val_loc)])
test_classes = np.squeeze(all_classes[np.squeeze(test_loc)])
prior_matrix = att_splits['att']
prior_matrix_tr = prior_matrix[:,(np.unique(train_classes)-1)]
prior_matrix_val = prior_matrix[:,(np.unique(val_classes)-1)]
prior_matrix_ts = prior_matrix[:,(np.unique(test_classes)-1)]
train_img=res101['image_files'][np.squeeze(train_loc)]
val_img=res101['image_files'][np.squeeze(val_loc)]
test_img=res101['image_files'][np.squeeze(test_loc)]
train_img_names=[]
val_img_names=[]
test_img_names=[]
for i in range(train_img.shape[0]):
train_img_names.append(train_img[i][0][0].split('ages/')[1]+'+'+allclass_names[train_classes[i]-1][0][0])
for i in range(test_img.shape[0]):
test_img_names.append(test_img[i][0][0].split('ages/')[1]+'+'+allclass_names[test_classes[i]-1][0][0])
for i in range(val_img.shape[0]):
val_img_names.append(val_img[i][0][0].split('ages/')[1]+'+'+allclass_names[val_classes[i]-1][0][0])
all_img_names=train_img_names+val_img_names+test_img_names
rept_img_dict={}
for i in range(len(all_img_names)):
rept_img_dict[all_img_names[i]]=0
img2att={}
apascal_train=open('../attribute_data/apascal_train.txt').readlines()
apascal_test=open('../attribute_data/apascal_test.txt').readlines()
ayahoo_test=open('../attribute_data/ayahoo_test.txt').readlines()
for i in range(len(apascal_train)):
img_name=apascal_train[i].split(' ')[0]+'+'+apascal_train[i].split(' ')[1]
if all_img_names.count(img_name)>1:
suffix=str(rept_img_dict[img_name]+1)
rept_img_dict[img_name]+=1
else:
suffix='1'
bin_att=[]
for j in range(64):
bin_att.append(float(apascal_train[i].split(' ')[j+6]))
img2att[img_name+'_'+suffix]=np.array(bin_att)
for a in range(len(apascal_test)):
img_name=apascal_test[a].split(' ')[0]+'+'+apascal_test[a].split(' ')[1]
if all_img_names.count(img_name)>1:
suffix=str(rept_img_dict[img_name]+1)
rept_img_dict[img_name]+=1
else:
suffix='1'
bin_att=[]
for b in range(64):
bin_att.append(float(apascal_test[a].split(' ')[b+6]))
img2att[img_name+'_'+suffix]=np.array(bin_att)
for x in range(len(ayahoo_test)):
img_name=ayahoo_test[x].split(' ')[0]+'+'+ayahoo_test[x].split(' ')[1]
if all_img_names.count(img_name)>1:
suffix=str(rept_img_dict[img_name]+1)
rept_img_dict[img_name]+=1
else:
suffix='1'
bin_att=[]
for y in range(64):
bin_att.append(float(ayahoo_test[x].split(' ')[y+6]))
img2att[img_name+'_'+suffix]=np.array(bin_att)
train_att={}
for img_name in set(train_img_names):
if rept_img_dict[img_name]!=0:
for j in range(rept_img_dict[img_name]):
if j:
temp_arr=np.vstack([temp_arr, np.expand_dims(img2att[img_name+'_'+str(j+1)], 0)])
else:
temp_arr=np.expand_dims(img2att[img_name+'_'+str(j+1)], 0)
train_att[img_name]=np.round(np.mean(temp_arr,0))
else:
train_att[img_name]=img2att[img_name+'_1']
val_att={}
for img_name in set(val_img_names):
if rept_img_dict[img_name]!=0:
for j in range(rept_img_dict[img_name]):
if j:
temp_arr=np.vstack([temp_arr, np.expand_dims(img2att[img_name+'_'+str(j+1)], 0)])
else:
temp_arr=np.expand_dims(img2att[img_name+'_'+str(j+1)], 0)
val_att[img_name]=np.round(np.mean(temp_arr,0))
else:
val_att[img_name]=img2att[img_name+'_1']
test_att={}
for img_name in set(test_img_names):
if rept_img_dict[img_name]!=0:
for j in range(rept_img_dict[img_name]):
if j:
temp_arr=np.vstack([temp_arr, np.expand_dims(img2att[img_name+'_'+str(j+1)], 0)])
else:
temp_arr=np.expand_dims(img2att[img_name+'_'+str(j+1)], 0)
test_att[img_name]=np.round(np.mean(temp_arr,0))
else:
test_att[img_name]=img2att[img_name+'_1']
img_name_list=[train_img_names, val_img_names, test_img_names]
feature_list = [train_X, val_X, test_X]
class_list = [train_classes, val_classes, test_classes]
signature_list = [prior_matrix_tr, prior_matrix_val, prior_matrix_ts]
att_list=[train_att, val_att, test_att]
return img_name_list, class_list, signature_list, feature_list, att_list, img2att, rept_img_dict
class Dataset(data.Dataset):
def __init__(self, list_IDs, data_dict, labels_dict, class_list, eszsl_classes, attributes, attribute_groups, groups, adv_dict=[], zero_shot=0):
self.data_dict = data_dict
self.labels_dict = labels_dict
self.class_list = class_list
self.eszsl_classes = eszsl_classes
self.list_IDs = list_IDs
self.attributes = attributes
self.attribute_groups = attribute_groups
self.groups = groups
self.adv_dict = adv_dict
self.zero_shot = zero_shot
def __len__(self):
return len(self.list_IDs)
def __getitem__(self, index):
# Select sample
ID = self.list_IDs[index]
X, y = self.__data_generation(ID, index)
return X, y
def __data_generation(self, ID, index):
def _create_feed():
label = self.labels_dict[ID]
feed_dict={}
for group in self.groups:
att_indices_per_group = [self.attributes.index(att) for att in self.attribute_groups[group]]
feed_dict[group] = np.array(label)[att_indices_per_group]
return dict([(k, np.array(v, dtype=np.float32)) for k, v in feed_dict.items()])
# Generate data
if self.zero_shot:
group_dict = _create_feed()
if self.zero_shot==2:
y_dict = group_dict
else:
y_dict={}
y_dict['conc_l'] = self.eszsl_classes.index(self.class_list[index])
if self.adv_dict:
for adv_branch in self.adv_dict:
y_dict[adv_branch['node_name']] = group_dict[adv_branch['group']]
else:
y_dict = _create_feed()
if self.adv_dict:
for adv_branch in self.adv_dict:
y_dict[adv_branch['node_name']] = y_dict[adv_branch['group']]
X = self.data_dict[index]
X = torch.from_numpy(X)
return X, y_dict
| 38.531609
| 148
| 0.614886
| 2,002
| 13,409
| 3.787213
| 0.066933
| 0.075706
| 0.043524
| 0.044315
| 0.830915
| 0.811395
| 0.810868
| 0.793063
| 0.741493
| 0.724083
| 0
| 0.023192
| 0.241107
| 13,409
| 347
| 149
| 38.642651
| 0.721895
| 0.002014
| 0
| 0.725352
| 0
| 0
| 0.046715
| 0.020031
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024648
| false
| 0
| 0.021127
| 0.003521
| 0.070423
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ae71fe96cdd81971431a30b1dc2185700e64da17
| 134
|
py
|
Python
|
venv/Lib/site-packages/meld3/meld3.py
|
hanxt6/BKframeworkObj
|
91a4869db3d48fd4bb10507acbddd94c7c921091
|
[
"Apache-2.0"
] | 39
|
2016-12-05T14:36:37.000Z
|
2021-07-29T18:22:34.000Z
|
venv/Lib/site-packages/meld3/meld3.py
|
hanxt6/BKframeworkObj
|
91a4869db3d48fd4bb10507acbddd94c7c921091
|
[
"Apache-2.0"
] | 68
|
2016-12-12T20:38:47.000Z
|
2020-07-26T18:28:49.000Z
|
py2env/lib/python2.7/site-packages/meld3/meld3.py
|
xiaofam/bluekingDevops_tmp
|
fb8eb6f6eae4d56f752717a3f31f39f17f88fa14
|
[
"Apache-2.0"
] | 120
|
2016-08-18T14:53:03.000Z
|
2020-06-16T13:27:20.000Z
|
from . import parse_xml # BBB
from . import parse_html # BBB
from . import parse_xmlstring # BBB
from . import parse_htmlstring # BBB
| 26.8
| 36
| 0.761194
| 20
| 134
| 4.9
| 0.4
| 0.408163
| 0.612245
| 0.55102
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179104
| 134
| 4
| 37
| 33.5
| 0.890909
| 0.11194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ae9117c6047425048ddbc1b22435e9764c833045
| 9,832
|
py
|
Python
|
emulatte/core/kernels.py
|
WasedaGeophysics/w1dem
|
487e117ad0f7b74367f22ad404bd4f6adf473a7b
|
[
"Apache-2.0"
] | 1
|
2021-12-13T00:15:20.000Z
|
2021-12-13T00:15:20.000Z
|
emulatte/core/kernels.py
|
WasedaGeophysics/w1dem
|
487e117ad0f7b74367f22ad404bd4f6adf473a7b
|
[
"Apache-2.0"
] | null | null | null |
emulatte/core/kernels.py
|
WasedaGeophysics/w1dem
|
487e117ad0f7b74367f22ad404bd4f6adf473a7b
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Waseda Geophysics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
import numpy as np
from scipy.special import erf, erfc, jn
from emulatte.utils.function import kroneckers_delta
def compute_kernel_vmd(model, omega):
    """Assemble the Hankel-domain kernels (E_phi, H_r, H_z) for the VMD source.

    ``model`` supplies the layer coefficients, layer indices and geometry;
    ``omega`` is the angular frequency. The stacked kernel array is also
    cached on ``model.kernel`` before being returned.
    """
    U_te, U_tm, D_te, D_tm, e_up, e_down = model.compute_coefficients(omega)
    ri = model.rlayer - 1   # receiver-layer index (0-based)
    si = model.slayer - 1   # source-layer index (0-based)
    u_src = model.u[si]
    # Direct-wave term: only present when source and receiver share a layer.
    delta = kroneckers_delta(model.rlayer, model.slayer)
    attenuation = np.exp(-u_src * np.abs(model.rz - model.sz))
    sign = (model.rz - model.sz) / np.abs(model.rz - model.sz)

    kernel_te = U_te[ri] * e_up + D_te[ri] * e_down + delta * attenuation
    kernel_te_hr = U_te[ri] * e_up - D_te[ri] * e_down \
        + delta * sign * attenuation
    kernel_e_phi = kernel_te * model.lambda_ ** 2 / u_src
    kernel_h_r = kernel_te_hr * model.lambda_ ** 2 * model.u[ri] / u_src
    kernel_h_z = kernel_e_phi * model.lambda_
    kernel = np.array([kernel_e_phi, kernel_h_r, kernel_h_z])
    model.kernel = kernel
    return kernel
def compute_kernel_hmd(model, omega):
    """Assemble the six TE/TM Hankel-domain kernels for the HMD source.

    Returns ``np.array([kernel_tm_er, kernel_te_er, kernel_tm_ez,
    kernel_tm_hr, kernel_te_hr, kernel_te_hz])``.
    """
    U_te, U_tm, D_te, D_tm, e_up, e_down = model.compute_coefficients(omega)
    ri = model.rlayer - 1   # receiver-layer index (0-based)
    si = model.slayer - 1   # source-layer index (0-based)
    u_rcv = model.u[ri]
    u_src = model.u[si]
    delta = kroneckers_delta(model.rlayer, model.slayer)
    attenuation = np.exp(-u_src * np.abs(model.rz - model.sz))
    sgn = np.sign(model.rz - model.sz)

    up_te = U_te[ri] * e_up
    down_te = D_te[ri] * e_down
    up_tm = U_tm[ri] * e_up
    down_tm = D_tm[ri] * e_down

    kernel_tm_er = (-up_tm + down_tm - sgn * delta * attenuation) \
        * u_rcv / u_src
    kernel_te_er = up_te + down_te + sgn * delta * attenuation
    kernel_tm_ez = (up_tm + down_tm + delta * attenuation) / u_src
    # H_r shares the TM kernel of E_z at this stage.
    kernel_tm_hr = (up_tm + down_tm + delta * attenuation) / u_src
    kernel_te_hr = (-up_te + down_te - delta * attenuation) * u_rcv
    kernel_te_hz = up_te + down_te + sgn * delta * attenuation

    return np.array([kernel_tm_er, kernel_te_er, kernel_tm_ez,
                     kernel_tm_hr, kernel_te_hr, kernel_te_hz])
def compute_kernel_ved(model, omega):
    """Assemble the Hankel-domain kernels (E_phi, E_z, H_r) for the VED source."""
    U_te, U_tm, D_te, D_tm, e_up, e_down = model.compute_coefficients(omega)
    ri = model.rlayer - 1   # receiver-layer index (0-based)
    si = model.slayer - 1   # source-layer index (0-based)
    u_src = model.u[si]
    delta = kroneckers_delta(model.rlayer, model.slayer)
    attenuation = np.exp(-u_src * np.abs(model.rz - model.sz))
    sign = (model.rz - model.sz) / np.abs(model.rz - model.sz)

    kernel_tm = U_tm[ri] * e_up + D_tm[ri] * e_down + delta * attenuation
    kernel_tm_er = -U_tm[ri] * e_up + D_tm[ri] * e_down \
        - sign * delta * attenuation
    kernel_e_phi = kernel_tm_er * model.u[ri] / u_src
    kernel_e_z = kernel_tm / u_src
    # H_r shares the same TM kernel as E_z at this stage.
    kernel_h_r = kernel_tm / u_src
    return np.array([kernel_e_phi, kernel_e_z, kernel_h_r])
def compute_kernel_hed(model, omega):
    """Assemble the six TE/TM Hankel-domain kernels for the HED source.

    Returns ``np.array([kernel_tm_er, kernel_te_er, kernel_tm_ez,
    kernel_tm_hr, kernel_te_hr, kernel_te_hz])``.
    """
    U_te, U_tm, D_te, D_tm, e_up, e_down = model.compute_coefficients(omega)
    ri = model.rlayer - 1   # receiver-layer index (0-based)
    si = model.slayer - 1   # source-layer index (0-based)
    u_rcv = model.u[ri]
    u_src = model.u[si]
    delta = kroneckers_delta(model.rlayer, model.slayer)
    attenuation = np.exp(-u_src * np.abs(model.rz - model.sz))
    sgn = np.sign(model.rz - model.sz)

    up_te = U_te[ri] * e_up
    down_te = D_te[ri] * e_down
    up_tm = U_tm[ri] * e_up
    down_tm = D_tm[ri] * e_down

    kernel_tm_er = (-up_tm + down_tm - delta * attenuation) * u_rcv
    kernel_te_er = (up_te + down_te + delta * attenuation) / u_src
    # NOTE(review): the 1e-2 shift below zeroes the direct-wave term when the
    # receiver sits (almost) exactly at the source depth — presumably a
    # numerical guard; confirm against the original formulation.
    kernel_tm_ez = up_tm + down_tm \
        + (1 - kroneckers_delta(model.rz - 1e-2, model.sz)) \
        * sgn * delta * attenuation
    kernel_tm_hr = up_tm + down_tm + sgn * delta * attenuation
    kernel_te_hr = (-up_te + down_te - sgn * delta * attenuation) \
        * u_rcv / u_src
    # H_z reuses the TE kernel of E_r.
    kernel_te_hz = kernel_te_er
    return np.array([kernel_tm_er, kernel_te_er, kernel_tm_ez,
                     kernel_tm_hr, kernel_te_hr, kernel_te_hz])
def compute_kernel_circular(model, omega):
    """Assemble the kernels (E_phi, H_r, H_z) for a circular-loop source,
    including the Bessel weights evaluated at ``lambda_ * r``."""
    U_te, U_tm, D_te, D_tm, e_up, e_down = model.compute_coefficients(omega)
    ri = model.rlayer - 1   # receiver-layer index (0-based)
    si = model.slayer - 1   # source-layer index (0-based)
    u_src = model.u[si]
    delta = kroneckers_delta(model.rlayer, model.slayer)
    attenuation = np.exp(-u_src * np.abs(model.rz - model.sz))
    sign = (model.rz - model.sz) / np.abs(model.rz - model.sz)

    kernel_te = U_te[ri] * e_up + D_te[ri] * e_down + delta * attenuation
    kernel_te_hr = -U_te[ri] * e_up + D_te[ri] * e_down \
        - delta * sign * attenuation
    # First- and zeroth-order Bessel weights at the receiver offset.
    bessel_j1 = jn(1, model.lambda_ * model.r)
    bessel_j0 = jn(0, model.lambda_ * model.r)
    kernel_e_phi = kernel_te * model.lambda_ * bessel_j1 / u_src
    kernel_h_r = kernel_te_hr * model.lambda_ * bessel_j1 \
        * model.u[ri] / u_src
    kernel_h_z = kernel_te * model.lambda_ ** 2 * bessel_j0 / u_src
    return np.array([kernel_e_phi, kernel_h_r, kernel_h_z])
def compute_kernel_coincident(model, omega):
    """Assemble the H_z kernel for a coincident-loop configuration."""
    U_te, U_tm, D_te, D_tm, e_up, e_down = model.compute_coefficients(omega)
    ri = model.rlayer - 1   # receiver-layer index (0-based)
    si = model.slayer - 1   # source-layer index (0-based)
    u_src = model.u[si]
    # NOTE(review): unlike the other kernels in this module, the direct-wave
    # term is subtracted here — confirm the sign against the formulation.
    kernel_te = U_te[ri] * e_up + D_te[ri] * e_down \
        - kroneckers_delta(model.rlayer, model.slayer) \
        * np.exp(-u_src * np.abs(model.rz - model.sz))
    # Bessel weight at the transmitter-loop radius.
    bessel_j1 = jn(1, model.lambda_ * model.src.radius)
    kernel_h_z = kernel_te * model.lambda_ * bessel_j1 / u_src
    return np.array(kernel_h_z)
| 47.728155
| 76
| 0.509255
| 1,276
| 9,832
| 3.712382
| 0.097962
| 0.141651
| 0.10893
| 0.098797
| 0.833228
| 0.813173
| 0.799662
| 0.782352
| 0.780241
| 0.742031
| 0
| 0.015995
| 0.364117
| 9,832
| 205
| 77
| 47.960976
| 0.741683
| 0.059601
| 0
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036364
| false
| 0
| 0.018182
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8836be269ed1cb7436ccaae35d11b4e6664baba1
| 136
|
py
|
Python
|
src/main/interfaces/__init__.py
|
NLGS2907/Alg1-Lector-de-Ejercicios
|
bb7e44bd8e5fd7420a61108e5ecb246b510b396b
|
[
"MIT"
] | 4
|
2021-09-23T16:06:18.000Z
|
2021-09-23T23:17:32.000Z
|
src/main/interfaces/__init__.py
|
NLGS2907/Alg1-Lector-de-Ejercicios
|
bb7e44bd8e5fd7420a61108e5ecb246b510b396b
|
[
"MIT"
] | null | null | null |
src/main/interfaces/__init__.py
|
NLGS2907/Alg1-Lector-de-Ejercicios
|
bb7e44bd8e5fd7420a61108e5ecb246b510b396b
|
[
"MIT"
] | null | null | null |
"""
Paquete de interfaces.
"""
from .ui_ejercicios import *
from .ui_general import *
from .ui_ppt import *
from .ui_unidades import *
| 15.111111
| 28
| 0.727941
| 19
| 136
| 5
| 0.526316
| 0.252632
| 0.378947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161765
| 136
| 8
| 29
| 17
| 0.833333
| 0.161765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
883f476a0ce65a69014a77fac39b4560ee040bf9
| 15,696
|
py
|
Python
|
tests/unit/models/field/text.py
|
RaenonX/Jelly-Bot-API
|
c7da1e91783dce3a2b71b955b3a22b68db9056cf
|
[
"MIT"
] | 5
|
2020-08-26T20:12:00.000Z
|
2020-12-11T16:39:22.000Z
|
tests/unit/models/field/text.py
|
RaenonX/Jelly-Bot
|
c7da1e91783dce3a2b71b955b3a22b68db9056cf
|
[
"MIT"
] | 234
|
2019-12-14T03:45:19.000Z
|
2020-08-26T18:55:19.000Z
|
tests/unit/models/field/text.py
|
RaenonX/Jelly-Bot-API
|
c7da1e91783dce3a2b71b955b3a22b68db9056cf
|
[
"MIT"
] | 2
|
2019-10-23T15:21:15.000Z
|
2020-05-22T09:35:55.000Z
|
from typing import Type, Any, Tuple
from django.utils.functional import Promise
from models.field import TextField, BaseField
from models.field.exceptions import (
FieldTypeMismatchError, FieldNoneNotAllowedError, FieldEmptyValueNotAllowedError,
FieldError, FieldMaxLengthReachedError, FieldRegexNotMatchError, FieldInvalidDefaultValueError
)
from tests.base import TestCase
from ._test_val import TestFieldValue
from ._test_prop import TestFieldProperty
__all__ = ["TestTextFieldExtra", "TestTextFieldProperty", "TestTextFieldValueAllowNone",
"TestTextFieldValueDefault", "TestTextFieldValueDifferentMaxLength", "TestTextFieldValueMustHaveContent",
"TestTextFieldValueNoAutocast", "TestTextFieldValueWithRegex", "TestTextFieldValueNoStrip"]
class TestTextFieldProperty(TestFieldProperty.TestClass):
    """Property tests for ``TextField`` in its default configuration."""

    def get_field_class(self) -> Type[BaseField]:
        return TextField

    def valid_not_none_obj_value(self) -> Any:
        return "A"

    def expected_none_object(self) -> Any:
        # The field substitutes the empty string for a missing value.
        return ""

    def get_valid_default_values(self) -> Tuple[Tuple[Any, Any], ...]:
        ml = TextField.DEFAULT_MAX_LENGTH
        cases = [
            ("A", "A"),
            ("OXX", "OXX"),
            ("", ""),
            (" ", ""),  # surrounding whitespace is stripped
            ("x" * (ml - 1), "x" * (ml - 1)),
            ("x" * ml, "x" * ml),
            (True, "True"),  # non-strings are cast via str()
            (7, "7"),
        ]
        return tuple(cases)

    def get_invalid_default_values(self) -> Tuple[Any, ...]:
        # One string past the length cap plus container types the field rejects.
        too_long = "x" * (TextField.DEFAULT_MAX_LENGTH + 1)
        return too_long, [7, 9], {7: 9}, {7, 9}, (7, 9)

    def get_expected_types(self) -> Tuple[Type[Any], ...]:
        return str, int, bool, Promise

    def get_desired_type(self) -> Type[Any]:
        return str
class TestTextFieldValueDefault(TestFieldValue.TestClass):
    """Value handling for a ``TextField`` left at its default settings."""

    def get_field(self) -> BaseField:
        return TextField("k")

    def get_value_type_match_test(self) -> Tuple[Tuple[Any, bool], ...]:
        ml = TextField.DEFAULT_MAX_LENGTH
        # Type matching ignores string length; only the base type counts.
        cases = [
            (None, False),
            ("", True),
            (" ", True),
            ("A", True),
            ("OXX", True),
            ("x" * (ml - 1), True),
            ("x" * ml, True),
            ("x" * (ml + 1), True),
            (True, True),
            (7, True),
            (object(), False),
            ([7, 9], False),
            ({7: 9}, False),
            ({7, 9}, False),
            ((7, 9), False),
        ]
        return tuple(cases)

    def get_value_validity_test(self) -> Tuple[Tuple[Any, bool], ...]:
        ml = TextField.DEFAULT_MAX_LENGTH
        # Validity additionally enforces the max-length cap.
        cases = [
            (None, False),
            ("", True),
            (" ", True),
            ("A", True),
            ("OXX", True),
            ("x" * (ml - 1), True),
            ("x" * ml, True),
            ("x" * (ml + 1), False),
            (True, True),
            (7, True),
            (object(), False),
            ([7, 9], False),
            ({7: 9}, False),
            ({7, 9}, False),
            ((7, 9), False),
        ]
        return tuple(cases)

    def is_auto_cast(self) -> bool:
        return True

    def get_values_to_cast(self) -> Tuple[Tuple[Any, Any], ...]:
        ml = TextField.DEFAULT_MAX_LENGTH
        cases = [
            ("", ""),
            (" ", ""),  # whitespace-only strings strip to empty
            ("A", "A"),
            ("OXX", "OXX"),
            ("x" * (ml - 1), "x" * (ml - 1)),
            ("x" * ml, "x" * ml),
            (True, "True"),
            (7, "7"),
        ]
        return tuple(cases)

    def get_valid_value_to_set(self) -> Tuple[Tuple[Any, Any], ...]:
        # With auto-cast enabled, setting behaves exactly like casting.
        return self.get_values_to_cast()

    def get_invalid_value_to_set(self) -> Tuple[Tuple[Any, Type[FieldError]], ...]:
        cases = [
            (None, FieldNoneNotAllowedError),
            (object(), FieldTypeMismatchError),
            ("x" * (TextField.DEFAULT_MAX_LENGTH + 1), FieldMaxLengthReachedError),
            ([7, 9], FieldTypeMismatchError),
            ({7: 9}, FieldTypeMismatchError),
            ({7, 9}, FieldTypeMismatchError),
            ((7, 9), FieldTypeMismatchError),
        ]
        return tuple(cases)
class TestTextFieldValueAllowNone(TestFieldValue.TestClass):
    """Value handling for a ``TextField`` constructed with ``allow_none=True``."""

    def get_field(self) -> BaseField:
        return TextField("k", allow_none=True)

    def get_value_type_match_test(self) -> Tuple[Tuple[Any, bool], ...]:
        ml = TextField.DEFAULT_MAX_LENGTH
        cases = [
            (None, True),  # None matches now that allow_none is on
            ("", True),
            (" ", True),
            ("A", True),
            ("OXX", True),
            ("x" * (ml - 1), True),
            ("x" * ml, True),
            ("x" * (ml + 1), True),
            (True, True),
            (7, True),
            (object(), False),
            ([7, 9], False),
            ({7: 9}, False),
            ({7, 9}, False),
            ((7, 9), False),
        ]
        return tuple(cases)

    def get_value_validity_test(self) -> Tuple[Tuple[Any, bool], ...]:
        ml = TextField.DEFAULT_MAX_LENGTH
        cases = [
            (None, True),
            ("", True),
            (" ", True),
            ("A", True),
            ("OXX", True),
            ("x" * (ml - 1), True),
            ("x" * ml, True),
            ("x" * (ml + 1), False),  # still subject to the length cap
            (True, True),
            (7, True),
            (object(), False),
            ([7, 9], False),
            ({7: 9}, False),
            ({7, 9}, False),
            ((7, 9), False),
        ]
        return tuple(cases)

    def is_auto_cast(self) -> bool:
        return True

    def get_values_to_cast(self) -> Tuple[Tuple[Any, Any], ...]:
        ml = TextField.DEFAULT_MAX_LENGTH
        cases = [
            (None, None),  # None passes through uncast
            ("", ""),
            (" ", ""),
            ("A", "A"),
            ("OXX", "OXX"),
            ("x" * (ml - 1), "x" * (ml - 1)),
            ("x" * ml, "x" * ml),
            (True, "True"),
            (7, "7"),
        ]
        return tuple(cases)

    def get_valid_value_to_set(self) -> Tuple[Tuple[Any, Any], ...]:
        # With auto-cast enabled, setting behaves exactly like casting.
        return self.get_values_to_cast()

    def get_invalid_value_to_set(self) -> Tuple[Tuple[Any, Type[FieldError]], ...]:
        cases = [
            ("x" * (TextField.DEFAULT_MAX_LENGTH + 1), FieldMaxLengthReachedError),
            (object(), FieldTypeMismatchError),
            ([7, 9], FieldTypeMismatchError),
            ({7: 9}, FieldTypeMismatchError),
            ({7, 9}, FieldTypeMismatchError),
            ((7, 9), FieldTypeMismatchError),
        ]
        return tuple(cases)
class TestTextFieldValueNoAutocast(TestFieldValue.TestClass):
    """Value handling for a ``TextField`` constructed with ``auto_cast=False``."""

    def get_field(self) -> BaseField:
        return TextField("k", auto_cast=False)

    def get_value_type_match_test(self) -> Tuple[Tuple[Any, bool], ...]:
        ml = TextField.DEFAULT_MAX_LENGTH
        cases = [
            (None, False),
            ("", True),
            (" ", True),
            ("A", True),
            ("OXX", True),
            ("x" * (ml - 1), True),
            ("x" * ml, True),
            ("x" * (ml + 1), True),
            (True, True),
            (7, True),
            (object(), False),
            ([7, 9], False),
            ({7: 9}, False),
            ({7, 9}, False),
            ((7, 9), False),
        ]
        return tuple(cases)

    def get_value_validity_test(self) -> Tuple[Tuple[Any, bool], ...]:
        ml = TextField.DEFAULT_MAX_LENGTH
        cases = [
            (None, False),
            ("", True),
            (" ", True),
            ("A", True),
            ("OXX", True),
            ("x" * (ml - 1), True),
            ("x" * ml, True),
            ("x" * (ml + 1), False),
            (True, True),
            (7, True),
            (object(), False),
            ([7, 9], False),
            ({7: 9}, False),
            ({7, 9}, False),
            ((7, 9), False),
        ]
        return tuple(cases)

    def is_auto_cast(self) -> bool:
        return False

    def get_values_to_cast(self) -> Tuple[Tuple[Any, Any], ...]:
        ml = TextField.DEFAULT_MAX_LENGTH
        # Explicit casting still converts, even though setting does not.
        cases = [
            ("", ""),
            (" ", ""),
            ("A", "A"),
            ("OXX", "OXX"),
            ("x" * (ml - 1), "x" * (ml - 1)),
            ("x" * ml, "x" * ml),
            (True, "True"),
            (7, "7"),
        ]
        return tuple(cases)

    def get_valid_value_to_set(self) -> Tuple[Tuple[Any, Any], ...]:
        ml = TextField.DEFAULT_MAX_LENGTH
        # Without auto-cast, non-string values are stored as-is.
        cases = [
            ("", ""),
            (" ", ""),
            ("A", "A"),
            ("OXX", "OXX"),
            ("x" * (ml - 1), "x" * (ml - 1)),
            ("x" * ml, "x" * ml),
            (True, True),
            (7, 7),
        ]
        return tuple(cases)

    def get_invalid_value_to_set(self) -> Tuple[Tuple[Any, Type[FieldError]], ...]:
        cases = [
            (None, FieldNoneNotAllowedError),
            ("x" * (TextField.DEFAULT_MAX_LENGTH + 1), FieldMaxLengthReachedError),
            ([7, 9], FieldTypeMismatchError),
            ({7: 9}, FieldTypeMismatchError),
            ({7, 9}, FieldTypeMismatchError),
            ((7, 9), FieldTypeMismatchError),
            (object(), FieldTypeMismatchError),
        ]
        return tuple(cases)
class TestTextFieldValueMustHaveContent(TestFieldValue.TestClass):
    """Value handling for a ``TextField`` with ``must_have_content=True``."""

    def get_field(self) -> BaseField:
        return TextField("k", must_have_content=True, default="default")

    def get_value_type_match_test(self) -> Tuple[Tuple[Any, bool], ...]:
        # Emptiness does not affect *type* matching.
        samples = ["", " ", "X", "XY", "$&*)(@"]
        return tuple((s, True) for s in samples)

    def get_value_validity_test(self) -> Tuple[Tuple[Any, bool], ...]:
        # Strings that strip down to nothing are rejected.
        cases = [
            ("", False),
            (" ", False),
            ("X", True),
            ("XY", True),
            ("$&*)(@", True),
        ]
        return tuple(cases)

    def is_auto_cast(self) -> bool:
        return True

    def get_values_to_cast(self) -> Tuple[Tuple[Any, Any], ...]:
        samples = ["X", "XY", "$&*)(@"]
        return tuple((s, s) for s in samples)

    def get_valid_value_to_set(self) -> Tuple[Tuple[Any, Any], ...]:
        # With auto-cast enabled, setting behaves exactly like casting.
        return self.get_values_to_cast()

    def get_invalid_value_to_set(self) -> Tuple[Tuple[Any, Type[FieldError]], ...]:
        return tuple((s, FieldEmptyValueNotAllowedError) for s in ("", " "))
class TestTextFieldValueWithRegex(TestFieldValue.TestClass):
    """Value handling for a ``TextField`` restricted by the regex ``[A-F]{8}``."""

    def get_field(self) -> BaseField:
        return TextField("k", regex="[A-F]{8}", default="AAAAAAAA")

    def get_value_type_match_test(self) -> Tuple[Tuple[Any, bool], ...]:
        # Any string matches the *type* regardless of the regex.
        cases = [
            (None, False),
            ("A", True),
            ("AB", True),
            ("accc", True),
            ("abcdefab", True),
            ("ABCDEFAG", True),
            ("ABCDEFAB", True),
        ]
        return tuple(cases)

    def get_value_validity_test(self) -> Tuple[Tuple[Any, bool], ...]:
        # Only the full 8-character uppercase A-F string satisfies the regex.
        cases = [
            (None, False),
            ("A", False),
            ("AB", False),
            ("accc", False),
            ("abcdefab", False),
            ("ABCDEFAG", False),
            ("ABCDEFAB", True),
        ]
        return tuple(cases)

    def is_auto_cast(self) -> bool:
        return True

    def get_values_to_cast(self) -> Tuple[Tuple[Any, Any], ...]:
        return (("ABCDEFAB", "ABCDEFAB"),)

    def get_valid_value_to_set(self) -> Tuple[Tuple[Any, Any], ...]:
        return (("ABCDEFAB", "ABCDEFAB"),)

    def get_invalid_value_to_set(self) -> Tuple[Tuple[Any, Type[FieldError]], ...]:
        non_matching = ("A", "AB", "accc", "abcdefab", "ABCDEFAG")
        return ((None, FieldNoneNotAllowedError),) \
            + tuple((s, FieldRegexNotMatchError) for s in non_matching)
class TestTextFieldValueDifferentMaxLength(TestFieldValue.TestClass):
    """Value handling for a ``TextField`` with a custom ``maxlen`` of 500."""

    def get_field(self) -> BaseField:
        return TextField("k", maxlen=500)

    def get_value_type_match_test(self) -> Tuple[Tuple[Any, bool], ...]:
        cases = [
            (None, False),
            ("A", True),
            ("AAA", True),
            ("A" * 499, True),
            ("A" * 500, True),
            ("A" * 501, True),   # type matches even past the cap
            (" " * 501, True),
        ]
        return tuple(cases)

    def get_value_validity_test(self) -> Tuple[Tuple[Any, bool], ...]:
        cases = [
            (None, False),
            ("A", True),
            ("AAA", True),
            ("A" * 499, True),
            ("A" * 500, True),
            ("A" * 501, False),  # one character over the cap
            (" " * 501, True),   # whitespace strips to "" before the check
        ]
        return tuple(cases)

    def is_auto_cast(self) -> bool:
        return True

    def get_values_to_cast(self) -> Tuple[Tuple[Any, Any], ...]:
        cases = [
            ("A", "A"),
            ("AAA", "AAA"),
            ("A" * 499, "A" * 499),
            ("A" * 500, "A" * 500),
            (" " * 501, ""),
        ]
        return tuple(cases)

    def get_valid_value_to_set(self) -> Tuple[Tuple[Any, Any], ...]:
        # With auto-cast enabled, setting behaves exactly like casting.
        return self.get_values_to_cast()

    def get_invalid_value_to_set(self) -> Tuple[Tuple[Any, Type[FieldError]], ...]:
        return (("A" * 501, FieldMaxLengthReachedError),)
class TestTextFieldValueNoStrip(TestFieldValue.TestClass):
    """Value handling for a ``TextField`` with ``maxlen=5`` and stripping disabled."""

    def get_field(self) -> BaseField:
        return TextField("k", maxlen=5, strip=False)

    def get_value_type_match_test(self) -> Tuple[Tuple[Any, bool], ...]:
        return ((None, False),) + tuple((" " * n, True) for n in (4, 5, 6))

    def get_value_validity_test(self) -> Tuple[Tuple[Any, bool], ...]:
        # With strip=False, whitespace counts toward the length cap.
        cases = [
            (None, False),
            (" " * 4, True),
            (" " * 5, True),
            (" " * 6, False),
        ]
        return tuple(cases)

    def is_auto_cast(self) -> bool:
        return True

    def get_values_to_cast(self) -> Tuple[Tuple[Any, Any], ...]:
        # Whitespace survives casting because stripping is disabled.
        return tuple((" " * n, " " * n) for n in (4, 5))

    def get_valid_value_to_set(self) -> Tuple[Tuple[Any, Any], ...]:
        return tuple((" " * n, " " * n) for n in (4, 5))

    def get_invalid_value_to_set(self) -> Tuple[Tuple[Any, Type[FieldError]], ...]:
        return ((" " * 6, FieldMaxLengthReachedError),)
class TestTextFieldExtra(TestCase):
    """Tests for constructor-time validation of ``TextField`` default values."""
    def test_default_regex_not_match(self):
        # A default that does not satisfy the field's own regex must be
        # rejected when the field is constructed.
        with self.assertRaises(FieldInvalidDefaultValueError):
            TextField("k", default="ABCDE", regex=r"[A-E]{4}")
    def test_default_no_content(self):
        # With must_have_content=True, an empty default is likewise invalid.
        with self.assertRaises(FieldInvalidDefaultValueError):
            TextField("k", default="", must_have_content=True)
| 31.773279
| 116
| 0.482416
| 1,472
| 15,696
| 4.949728
| 0.07269
| 0.068625
| 0.116662
| 0.13725
| 0.748422
| 0.748422
| 0.744304
| 0.732501
| 0.713286
| 0.713286
| 0
| 0.020202
| 0.344037
| 15,696
| 493
| 117
| 31.837728
| 0.687451
| 0
| 0
| 0.744076
| 0
| 0
| 0.044024
| 0.014144
| 0
| 0
| 0
| 0
| 0.004739
| 1
| 0.137441
| false
| 0
| 0.016588
| 0.132701
| 0.308057
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
883f94e232cb75e3b1d7c97591957bddde655a5a
| 2,970
|
py
|
Python
|
tests/parsers/test_ph.py
|
ltalirz/aiida-quantumespresso
|
4a57750dba4e09cf4a1c51a391ba9e385c1318ba
|
[
"MIT"
] | null | null | null |
tests/parsers/test_ph.py
|
ltalirz/aiida-quantumespresso
|
4a57750dba4e09cf4a1c51a391ba9e385c1318ba
|
[
"MIT"
] | null | null | null |
tests/parsers/test_ph.py
|
ltalirz/aiida-quantumespresso
|
4a57750dba4e09cf4a1c51a391ba9e385c1318ba
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# pylint: disable=unused-argument
"""Tests for the `PhParser`."""
from __future__ import absolute_import
import pytest
@pytest.fixture
def generate_inputs():
    """Return only those inputs that the parser will expect to be there.

    The tests in this module feed the parser no explicit inputs, so an empty
    mapping suffices.
    """
    return {}
def test_ph_default(fixture_database, fixture_computer_localhost, generate_calc_job_node, generate_parser,
                    generate_inputs, data_regression):
    """Test a default `ph.x` calculation."""
    # Calculation-job and parser entry points are one and the same plugin.
    entry_point = 'quantumespresso.ph'

    node = generate_calc_job_node(entry_point, fixture_computer_localhost, 'default', generate_inputs)
    parser = generate_parser(entry_point)
    results, calcfunction = parser.parse_from_node(node, store_provenance=False)

    # Parsing must complete cleanly and report success.
    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_finished_ok, calcfunction.exit_message
    assert 'output_parameters' in results
    data_regression.check(results['output_parameters'].get_dict())
def test_ph_not_converged(fixture_database, fixture_computer_localhost, generate_calc_job_node, generate_parser,
                          generate_inputs, data_regression):
    """Test a `ph.x` calculation where convergence is not reached."""
    entry_point = 'quantumespresso.ph'

    node = generate_calc_job_node(entry_point, fixture_computer_localhost, 'failed_convergence_not_reached',
                                  generate_inputs)
    parser = generate_parser(entry_point)
    results, calcfunction = parser.parse_from_node(node, store_provenance=False)

    # Parsing itself finishes, but the calculation is flagged as failed with
    # the dedicated convergence exit code.
    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert calcfunction.exit_status == node.process_class.exit_codes.ERROR_CONVERGENCE_NOT_REACHED.status
    assert 'output_parameters' in results
    data_regression.check(results['output_parameters'].get_dict())
def test_ph_out_of_walltime(fixture_database, fixture_computer_localhost, generate_calc_job_node, generate_parser,
                            generate_inputs, data_regression):
    """Test a `ph.x` calculation that runs out of walltime."""
    entry_point = 'quantumespresso.ph'

    node = generate_calc_job_node(entry_point, fixture_computer_localhost, 'failed_out_of_walltime',
                                  generate_inputs)
    parser = generate_parser(entry_point)
    results, calcfunction = parser.parse_from_node(node, store_provenance=False)

    # Parsing itself finishes, but the calculation is flagged as failed with
    # the dedicated walltime exit code.
    assert calcfunction.is_finished, calcfunction.exception
    assert calcfunction.is_failed, calcfunction.exit_status
    assert calcfunction.exit_status == node.process_class.exit_codes.ERROR_OUT_OF_WALLTIME.status
    assert 'output_parameters' in results
    data_regression.check(results['output_parameters'].get_dict())
| 45
| 114
| 0.776094
| 367
| 2,970
| 5.893733
| 0.223433
| 0.038835
| 0.066574
| 0.052705
| 0.801202
| 0.801202
| 0.801202
| 0.801202
| 0.801202
| 0.801202
| 0
| 0.000393
| 0.143434
| 2,970
| 65
| 115
| 45.692308
| 0.849843
| 0.09899
| 0
| 0.674419
| 1
| 0
| 0.101548
| 0.01963
| 0
| 0
| 0
| 0
| 0.255814
| 1
| 0.093023
| false
| 0
| 0.046512
| 0
| 0.162791
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8869b77683b6fe6d03b46bb8b78241c5dc7c3114
| 2,474
|
py
|
Python
|
investing_function.py
|
dan840611/Python
|
2a63976d9a5dd4b4f5e337280e4f6eb6d4cde318
|
[
"MIT"
] | null | null | null |
investing_function.py
|
dan840611/Python
|
2a63976d9a5dd4b4f5e337280e4f6eb6d4cde318
|
[
"MIT"
] | null | null | null |
investing_function.py
|
dan840611/Python
|
2a63976d9a5dd4b4f5e337280e4f6eb6d4cde318
|
[
"MIT"
] | 1
|
2021-05-09T12:55:48.000Z
|
2021-05-09T12:55:48.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 22 15:54:57 2017
@author: Dan
"""
import pandas as pd
import requests
import json
import datetime as dt
import Save_to_SQL as Save
def get_investing(url, column='india-interest-rate', date_format='%Y-%m-%d',
                  day_offset=0):
    """Download an investing.com events-chart JSON series, de-duplicated by date.

    Generalizes the previous hard-coded version: column name, date format and
    timestamp shift are now parameters (defaults preserve the old behavior).

    Parameters
    ----------
    url : str
        Full URL of the ``events_charts`` JSON endpoint.
    column : str
        Name of the value column in the resulting DataFrame.
    date_format : str
        ``strftime`` format used to normalise each timestamp to a date string.
    day_offset : int
        Days subtracted from each raw timestamp before formatting.

    Returns
    -------
    pandas.DataFrame
        One row per formatted date (last observation wins) with a single
        float-valued column named ``column``. Also printed for inspection.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36'
    }
    raw = requests.get(str(url), headers=headers).text
    rows = json.loads(raw)['data']
    dates = []
    values = []
    for row in rows:
        # Timestamps arrive in milliseconds since the epoch.
        moment = dt.datetime.fromtimestamp(row[0] / 1000) - dt.timedelta(days=day_offset)
        dates.append(moment.strftime(date_format))
        values.append(float(row[1]))
    frame = pd.DataFrame({column: values, 'date': dates}, index=dates)
    # Keep only the latest observation per date, then drop the helper column.
    data = frame.drop_duplicates(subset='date', keep='last').drop('date', axis=1)
    print(data)
    return data
# India interest rate (investing.com event id 597).
url = 'https://sbcharts.investing.com/events_charts/us/597.json'
get_investing(url)
# NOTE(review): disabled call below references `data`, which is a local inside
# get_investing and is not defined at module scope — it could not run as written.
#Save.Save_SeriesData(slug = '', Series = data['india-basic-interest-rate'])
################################################################################################################
import pandas as pd
import requests
import json
import datetime as dt
import Save_to_SQL as Save
def get_investing(url, column='india-IIP-yoy', date_format='%Y-%m-01',
                  day_offset=50):
    """Download an investing.com events-chart JSON series, de-duplicated by date.

    NOTE(review): this re-definition shadows the near-identical helper defined
    earlier in this module; the copies differed only in column name, date
    format and timestamp shift, which are now parameters (defaults preserve
    this copy's old behavior).

    Parameters
    ----------
    url : str
        Full URL of the ``events_charts`` JSON endpoint.
    column : str
        Name of the value column in the resulting DataFrame.
    date_format : str
        ``strftime`` format used to normalise each shifted timestamp.
    day_offset : int
        Days subtracted from each raw timestamp before formatting.

    Returns
    -------
    pandas.DataFrame
        One row per formatted date (last observation wins) with a single
        float-valued column named ``column``. Also printed for inspection.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36'
    }
    raw = requests.get(str(url), headers=headers).text
    rows = json.loads(raw)['data']
    dates = []
    values = []
    for row in rows:
        # Timestamps arrive in milliseconds since the epoch.
        moment = dt.datetime.fromtimestamp(row[0] / 1000) - dt.timedelta(days=day_offset)
        dates.append(moment.strftime(date_format))
        values.append(float(row[1]))
    frame = pd.DataFrame({column: values, 'date': dates}, index=dates)
    # Keep only the latest observation per date, then drop the helper column.
    data = frame.drop_duplicates(subset='date', keep='last').drop('date', axis=1)
    print(data)
    return data
# India index of industrial production (investing.com event id 435).
url = 'https://sbcharts.investing.com/events_charts/us/435.json'
get_investing(url)
# NOTE(review): disabled call below references `data`, which is a local inside
# get_investing and is not defined at module scope — it could not run as written.
#Save.Save_SeriesData(slug = 'india-industries-index-production', Series = data['india-IIP-yoy'])
| 28.113636
| 148
| 0.593775
| 343
| 2,474
| 4.230321
| 0.341108
| 0.044108
| 0.049621
| 0.044108
| 0.851826
| 0.851826
| 0.818746
| 0.818746
| 0.704342
| 0.649207
| 0
| 0.057114
| 0.207357
| 2,474
| 87
| 149
| 28.436782
| 0.682815
| 0.164915
| 0
| 0.8
| 0
| 0.04
| 0.237849
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.2
| 0
| 0.24
| 0.04
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8889b0b7ce4d5ba363abe715519ccb760f1230e9
| 78
|
py
|
Python
|
prior_simulator/__init__.py
|
shermanlo77/cptimeseries
|
2a847ac15f7ea4925896c2a7baec78e8717e63f4
|
[
"MIT"
] | 3
|
2021-02-24T10:16:37.000Z
|
2021-05-26T13:42:58.000Z
|
prior_simulator/__init__.py
|
shermanlo77/cptimeseries
|
2a847ac15f7ea4925896c2a7baec78e8717e63f4
|
[
"MIT"
] | null | null | null |
prior_simulator/__init__.py
|
shermanlo77/cptimeseries
|
2a847ac15f7ea4925896c2a7baec78e8717e63f4
|
[
"MIT"
] | 1
|
2021-02-25T06:53:14.000Z
|
2021-02-25T06:53:14.000Z
|
from prior_simulator import downscale
from prior_simulator import time_series
| 26
| 39
| 0.897436
| 11
| 78
| 6.090909
| 0.636364
| 0.268657
| 0.537313
| 0.716418
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 78
| 2
| 40
| 39
| 0.957143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
88b7ec843288b66dba09bacbb5083dd7b94601f5
| 37
|
py
|
Python
|
samples/src/main/resources/datasets/python/119.py
|
sritchie/kotlingrad
|
8165ed1cd77220a5347c58cded4c6f2bcf22ee30
|
[
"Apache-2.0"
] | 11
|
2020-12-19T01:19:44.000Z
|
2021-12-25T20:43:33.000Z
|
src/main/resources/datasets/python/119.py
|
breandan/katholic
|
081c39f3acc73ff41f5865563debe78a36e1038f
|
[
"Apache-2.0"
] | null | null | null |
src/main/resources/datasets/python/119.py
|
breandan/katholic
|
081c39f3acc73ff41f5865563debe78a36e1038f
|
[
"Apache-2.0"
] | 2
|
2021-01-25T07:59:20.000Z
|
2021-08-07T07:13:49.000Z
|
def stringTest1():
    """Return the constant string ``'test'``."""
    value = 'test'
    return value
| 12.333333
| 18
| 0.648649
| 4
| 37
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034483
| 0.216216
| 37
| 2
| 19
| 18.5
| 0.793103
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
31f692c271584eb72cb47e6d3099a87445880c0a
| 210
|
py
|
Python
|
pythagoras/__init__.py
|
snglth/Pythagoras
|
95ad7af88f470174eea4cf060f31433d026c76c6
|
[
"MIT"
] | null | null | null |
pythagoras/__init__.py
|
snglth/Pythagoras
|
95ad7af88f470174eea4cf060f31433d026c76c6
|
[
"MIT"
] | null | null | null |
pythagoras/__init__.py
|
snglth/Pythagoras
|
95ad7af88f470174eea4cf060f31433d026c76c6
|
[
"MIT"
] | null | null | null |
from pythagoras.global_objects import *
from pythagoras.utils import *
from pythagoras.dicts import *
from pythagoras.p_hash_address import *
from pythagoras.not_known import *
from pythagoras.p_cloud import *
| 30
| 39
| 0.828571
| 29
| 210
| 5.827586
| 0.448276
| 0.497041
| 0.591716
| 0.248521
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 210
| 6
| 40
| 35
| 0.908602
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ee099201e76f413b60ec8cfe53cdf5b6dcf3e40f
| 4,903
|
py
|
Python
|
day05/day05b.py
|
kannix68/advent_of_code_2019
|
b02a86e1f8e83111973cc2bc8c7f4d5dcf1c10aa
|
[
"MIT"
] | null | null | null |
day05/day05b.py
|
kannix68/advent_of_code_2019
|
b02a86e1f8e83111973cc2bc8c7f4d5dcf1c10aa
|
[
"MIT"
] | null | null | null |
day05/day05b.py
|
kannix68/advent_of_code_2019
|
b02a86e1f8e83111973cc2bc8c7f4d5dcf1c10aa
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8

##
# Advent of code 2019, AoC day 5 puzzle 2
# This solution (python3.7 jupyter notebook) by kannix68, @ 2020-01-03.

import sys
sys.path.insert(0, '..')  # allow import from parent dir
import lib.aochelper as aoc

import logging
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
log = logging.getLogger(__name__)
#log.setLevel(logging.DEBUG)
log.setLevel(logging.INFO)

## PROBLEM DOMAIN code
from intcode_interpreter import *

## MAIN

### tests

def check_program(ins, param, expected):
    """Run intcode program `ins` with the single input `param` and assert that
    the first value written to pseudo-stdout equals `expected`.

    Replaces the run/print/assert boilerplate that was previously copy-pasted
    once per example; messages and prints are unchanged.
    """
    computer = IntcodeInterpreter(ins)
    computer.store_int_stdin(param)
    computer.interpret_program()
    result = computer.pseudo_stdout
    print(f"result out={result}")
    result = result[0]
    aoc.assert_msg(f"input={ins} with param={param} expects output={expected}, got {result}", expected == result)

# example "1": "output 0 if the input was zero or 1 if the input was non-zero"
ins_position_mode = [3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9]  # "using position mode"
check_program(ins_position_mode, 0, 0)    # zero input
check_program(ins_position_mode, 99, 1)   # non-zero input

# example "2": same contract, "using immediate mode"
ins_immediate_mode = [3,3,1105,-1,9,1101,0,0,12,4,12,99,1]
check_program(ins_immediate_mode, 0, 0)   # zero input
check_program(ins_immediate_mode, 999, 1) # non-zero input

# "larger" example "3":
# output 999 if the input value is below 8,
# output 1000 if the input value is equal to 8,
# or output 1001 if the input value is greater than 8.
ins_larger = [3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,
              1106,0,36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,
              999,1105,1,46,1101,1000,1,20,4,20,1105,1,46,98,99]
check_program(ins_larger, 7, 999)    # input < 8
check_program(ins_larger, 8, 1000)   # input == 8
check_program(ins_larger, 9, 1001)   # input > 8

### personal input solution
data = aoc.read_file_firstline_to_str('day05.in')
data = list(map(int, data.split(',')))
print(f"data-type={type(data)} data={data}")
computer = IntcodeInterpreter(data)
computer.store_int_stdin(5)  # puzzle 2 feeds system ID 5
computer.interpret_program()
#result = computer.mem
result = computer.pseudo_stdout
print(f"result={result}")
| 25.404145
| 109
| 0.708546
| 838
| 4,903
| 4.088305
| 0.167064
| 0.02481
| 0.049621
| 0.039405
| 0.832166
| 0.819323
| 0.819323
| 0.808231
| 0.808231
| 0.808231
| 0
| 0.123413
| 0.132368
| 4,903
| 192
| 110
| 25.536458
| 0.681946
| 0.274118
| 0
| 0.76087
| 0
| 0
| 0.195254
| 0.006289
| 0
| 0
| 0
| 0
| 0.076087
| 1
| 0
| false
| 0
| 0.043478
| 0
| 0.043478
| 0.097826
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ee2cd5c6cee94aed4d6bc12433dd7a01d77aa28e
| 264
|
py
|
Python
|
bluebottle/payments_docdata/tests/factory_models.py
|
maykinmedia/bluebottle
|
355d4729662b5e9a03398efb4fe882e0f8cfa28d
|
[
"BSD-3-Clause"
] | null | null | null |
bluebottle/payments_docdata/tests/factory_models.py
|
maykinmedia/bluebottle
|
355d4729662b5e9a03398efb4fe882e0f8cfa28d
|
[
"BSD-3-Clause"
] | null | null | null |
bluebottle/payments_docdata/tests/factory_models.py
|
maykinmedia/bluebottle
|
355d4729662b5e9a03398efb4fe882e0f8cfa28d
|
[
"BSD-3-Clause"
] | null | null | null |
import factory
from ..models import DocdataPayment, DocdataTransaction
class DocdataPaymentFactory(factory.DjangoModelFactory):
    """Factory producing DocdataPayment instances for tests."""
    # FACTORY_FOR is the pre-2.4 factory_boy model binding; newer releases
    # use `class Meta: model = ...` — presumably an old factory_boy is pinned.
    FACTORY_FOR = DocdataPayment
class DocdataTransactionFactory(factory.DjangoModelFactory):
    """Factory producing DocdataTransaction instances for tests."""
    # FACTORY_FOR is the pre-2.4 factory_boy model binding; newer releases
    # use `class Meta: model = ...` — presumably an old factory_boy is pinned.
    FACTORY_FOR = DocdataTransaction
| 22
| 60
| 0.837121
| 21
| 264
| 10.428571
| 0.52381
| 0.228311
| 0.292237
| 0.319635
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113636
| 264
| 11
| 61
| 24
| 0.935897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c9f79441b8526b3200e5f8725c3816a16548adaf
| 69
|
py
|
Python
|
python_modules/libraries/dagster-aws/dagster_aws/ecr/__init__.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 4,606
|
2018-06-21T17:45:20.000Z
|
2022-03-31T23:39:42.000Z
|
python_modules/libraries/dagster-aws/dagster_aws/ecr/__init__.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 6,221
|
2018-06-12T04:36:01.000Z
|
2022-03-31T21:43:05.000Z
|
python_modules/libraries/dagster-aws/dagster_aws/ecr/__init__.py
|
dbatten5/dagster
|
d76e50295054ffe5a72f9b292ef57febae499528
|
[
"Apache-2.0"
] | 619
|
2018-08-22T22:43:09.000Z
|
2022-03-31T22:48:06.000Z
|
from .resources import ecr_public_resource, fake_ecr_public_resource
| 34.5
| 68
| 0.898551
| 10
| 69
| 5.7
| 0.7
| 0.315789
| 0.596491
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072464
| 69
| 1
| 69
| 69
| 0.890625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a016b3eff4e94c2f6d732c4729025b3a181e2603
| 3,061
|
py
|
Python
|
alphaml/engine/components/data_preprocessing/scaler.py
|
dingdian110/alpha-ml
|
d6a7a8a8a3452a7e3362bf0ef32b9ac5fe215fde
|
[
"BSD-3-Clause"
] | 1
|
2021-09-06T20:21:15.000Z
|
2021-09-06T20:21:15.000Z
|
alphaml/engine/components/data_preprocessing/scaler.py
|
dingdian110/alpha-ml
|
d6a7a8a8a3452a7e3362bf0ef32b9ac5fe215fde
|
[
"BSD-3-Clause"
] | null | null | null |
alphaml/engine/components/data_preprocessing/scaler.py
|
dingdian110/alpha-ml
|
d6a7a8a8a3452a7e3362bf0ef32b9ac5fe215fde
|
[
"BSD-3-Clause"
] | null | null | null |
from sklearn.preprocessing import MinMaxScaler, StandardScaler, MaxAbsScaler
from sklearn.preprocessing import normalize as sklearn_normalize
def _apply_to_numerical(dm, fit_transform, transform):
    """Apply `fit_transform` to the numerical columns of dm's train split and
    `transform` to the same columns of the validation/test splits when present.

    Numerical columns are those whose feature type is "Float" or "Discrete".
    Mutates `dm` (reassigns train_X, and val_X / test_X when available) and
    returns it.  Shared by all public scaling helpers below, which previously
    duplicated this body four times.
    """
    feature_types = dm.feature_types
    numerical_index = [i for i in range(len(feature_types))
                       if feature_types[i] in ("Float", "Discrete")]
    (train_x, _), (valid_x, _), (test_x, _) = dm.get_train(), dm.get_val(), dm.get_test()
    train_x[:, numerical_index] = fit_transform(train_x[:, numerical_index])
    dm.train_X = train_x
    if valid_x is not None:
        valid_x[:, numerical_index] = transform(valid_x[:, numerical_index])
        dm.val_X = valid_x
    if test_x is not None:
        test_x[:, numerical_index] = transform(test_x[:, numerical_index])
        dm.test_X = test_x
    return dm


def minmax_scale(dm):
    """Min-max scale the numerical columns of dm (fit on train); returns dm."""
    scaler = MinMaxScaler()
    return _apply_to_numerical(dm, scaler.fit_transform, scaler.transform)


def standard_scale(dm):
    """Standard (z-score) scale the numerical columns of dm (fit on train); returns dm."""
    scaler = StandardScaler()
    return _apply_to_numerical(dm, scaler.fit_transform, scaler.transform)


def maxabs_scale(dm):
    """Max-abs scale the numerical columns of dm (fit on train); returns dm."""
    scaler = MaxAbsScaler()
    return _apply_to_numerical(dm, scaler.fit_transform, scaler.transform)


def normalize(dm, norm="l2"):
    """Normalize the numerical columns of dm with the given norm; returns dm.

    Normalization is stateless, so train/val/test are each normalized
    independently (matching the original per-split behavior).
    """
    normalizer = lambda x: sklearn_normalize(x, norm)
    return _apply_to_numerical(dm, normalizer, normalizer)
| 36.440476
| 90
| 0.656321
| 440
| 3,061
| 4.225
| 0.097727
| 0.210866
| 0.193653
| 0.101668
| 0.880581
| 0.81603
| 0.81603
| 0.805272
| 0.805272
| 0.805272
| 0
| 0.000418
| 0.219209
| 3,061
| 83
| 91
| 36.879518
| 0.777406
| 0
| 0
| 0.803279
| 0
| 0
| 0.017641
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065574
| false
| 0
| 0.032787
| 0
| 0.163934
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4e4c36c4bbf82a2fb3afc34e2c52c8d0d8ab1fe8
| 3,509
|
py
|
Python
|
tests/test_cli_single_two.py
|
7rick03ligh7/pyhicrep
|
3c2e22259cd12ea22e938d329a671d1d3ac3b5e5
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_cli_single_two.py
|
7rick03ligh7/pyhicrep
|
3c2e22259cd12ea22e938d329a671d1d3ac3b5e5
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_cli_single_two.py
|
7rick03ligh7/pyhicrep
|
3c2e22259cd12ea22e938d329a671d1d3ac3b5e5
|
[
"BSD-3-Clause"
] | null | null | null |
import sys
import shutil
from .utils import read_txt_results
from pyhicrep.cli import main
reference_data = "./tests/r_hicrep_results.txt"
def _run_cli_case(chr_arg, ref_index, out_file=None, flags=("-silent",)):
    """Drive pyhicrep's CLI main() over the two bundled .cool files and check
    the generated SCC against the R hicrep reference results.

    chr_arg:   chromosome-selection CLI argument ("--chr=..." or "--chrFile=...")
    ref_index: index into the reference SCC list for the expected value
    out_file:  optional --outFile name; None keeps pyhicrep's default
               result file name (result_SCC.txt)
    flags:     trailing single-dash flags (e.g. "-pbar", "-silent", "-saveCSV")

    Replaces the argv/assert boilerplate that was duplicated in every test.
    """
    argv = ["pyhicrep",
            "--file1=./tests/data/1CSE-10.cool",
            "--file2=./tests/data/1CSE-11.cool",
            chr_arg,
            "--maxBins=50",
            "--h=3"]
    if out_file is not None:
        argv.append("--outFile=" + out_file)
    argv.append("--resFolder=test_results")
    argv.extend(flags)
    sys.argv = argv
    main()
    real_data = read_txt_results(reference_data)
    result_name = out_file if out_file is not None else "result_SCC.txt"
    gen_data = read_txt_results('./test_results/' + result_name)
    pair = '1CSE-10.cool 1CSE-11.cool'
    assert abs(gen_data['data'][pair][0] - real_data['data'][pair][ref_index]) < 0.01
    shutil.rmtree('./test_results')


def test_run_single_from_cli_two_files_chr1():
    _run_cli_case("--chr=chr1", 0, flags=("-pbar",))


def test_run_single_from_cli_two_files_chr2():
    _run_cli_case("--chr=chr2", 1)


def test_run_single_from_cli_two_files_chr3():
    _run_cli_case("--chr=chr3", 2, out_file="testout.txt")


def test_run_single_from_cli_two_files_chr1chr3():
    _run_cli_case("--chr=chr1,chr3", 0, out_file="testout.txt")


def test_run_single_from_cli_two_files_chrFile():
    _run_cli_case("--chrFile=./tests/chr.txt", 0, out_file="testout.txt",
                  flags=("-pbar", "-saveCSV"))
| 36.936842
| 72
| 0.54346
| 432
| 3,509
| 4.178241
| 0.12963
| 0.088643
| 0.083102
| 0.116343
| 0.902493
| 0.902493
| 0.902493
| 0.879224
| 0.86205
| 0.86205
| 0
| 0.0595
| 0.281562
| 3,509
| 94
| 73
| 37.329787
| 0.656486
| 0
| 0
| 0.761905
| 0
| 0
| 0.362211
| 0.200057
| 0
| 0
| 0
| 0
| 0.059524
| 1
| 0.059524
| false
| 0
| 0.047619
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4e65ee317f7edd3d0e39a0b8cec20f4db64f0b6e
| 47
|
py
|
Python
|
code/dynocard_alert/modules/edge_ml/test.py
|
DJuanes/iot-edge-dynocard
|
02b1666780969caad1ca1659158500835e2ec79f
|
[
"MIT"
] | 9
|
2018-06-25T15:40:21.000Z
|
2021-06-25T19:22:32.000Z
|
code/dynocard_alert/modules/edge_ml/test.py
|
DJuanes/iot-edge-dynocard
|
02b1666780969caad1ca1659158500835e2ec79f
|
[
"MIT"
] | 17
|
2019-02-28T12:54:10.000Z
|
2022-03-02T02:51:00.000Z
|
code/dynocard_alert/modules/edge_ml/test.py
|
DJuanes/iot-edge-dynocard
|
02b1666780969caad1ca1659158500835e2ec79f
|
[
"MIT"
] | 11
|
2018-05-31T05:55:21.000Z
|
2020-09-17T00:40:19.000Z
|
# Smoke test: confirm the azureml-core package imports and print its version.
import azureml.core
print(azureml.core.VERSION)
| 23.5
| 27
| 0.851064
| 7
| 47
| 5.714286
| 0.714286
| 0.55
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042553
| 47
| 2
| 27
| 23.5
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
14c37cb4be61a667ed3d6b010a1c080a687439f4
| 283
|
py
|
Python
|
tests/extmod/ubinascii_hexlify.py
|
peterson79/pycom-micropython-sigfox
|
3f93fc2c02567c96f18cff4af9125db8fd7a6fb4
|
[
"MIT"
] | 303
|
2015-07-11T17:12:55.000Z
|
2018-01-08T03:02:37.000Z
|
tests/extmod/ubinascii_hexlify.py
|
peterson79/pycom-micropython-sigfox
|
3f93fc2c02567c96f18cff4af9125db8fd7a6fb4
|
[
"MIT"
] | 27
|
2015-01-02T16:17:37.000Z
|
2015-09-07T19:21:26.000Z
|
tests/extmod/ubinascii_hexlify.py
|
peterson79/pycom-micropython-sigfox
|
3f93fc2c02567c96f18cff4af9125db8fd7a6fb4
|
[
"MIT"
] | 26
|
2018-01-18T09:15:33.000Z
|
2022-02-07T13:09:14.000Z
|
# Prefer MicroPython's ubinascii; fall back to CPython's stdlib binascii.
try:
    import ubinascii as binascii
except ImportError:
    import binascii

# Exercise hexlify over a spread of byte values and an ASCII payload.
test_vectors = (
    b'\x00\x01\x02\x03\x04\x05\x06\x07',
    b'\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f',
    b'\x7f\x80\xff',
    b'1234ABCDabcd',
)
for vector in test_vectors:
    print(binascii.hexlify(vector))
| 28.3
| 60
| 0.749117
| 45
| 283
| 4.711111
| 0.666667
| 0.245283
| 0.377358
| 0.396226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126437
| 0.077739
| 283
| 9
| 61
| 31.444444
| 0.685824
| 0
| 0
| 0
| 0
| 0
| 0.310954
| 0.226148
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.375
| 0
| 0.375
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
14e354f7ce396033ea6784ca421cf836a3923a15
| 8,701
|
py
|
Python
|
app/tests/unit/test_user_interface_repo.py
|
JulienBalestra/enjoliver
|
13b41d0c40a56ea212a88d3e4f3aee91a318f3f0
|
[
"MIT"
] | 33
|
2017-01-20T11:58:32.000Z
|
2021-08-21T16:33:18.000Z
|
app/tests/unit/test_user_interface_repo.py
|
JulienBalestra/enjoliver
|
13b41d0c40a56ea212a88d3e4f3aee91a318f3f0
|
[
"MIT"
] | 8
|
2017-04-20T14:17:37.000Z
|
2017-12-22T11:25:24.000Z
|
app/tests/unit/test_user_interface_repo.py
|
JulienBalestra/enjoliver
|
13b41d0c40a56ea212a88d3e4f3aee91a318f3f0
|
[
"MIT"
] | 5
|
2017-04-19T14:36:12.000Z
|
2017-10-10T11:09:16.000Z
|
import unittest
from app import smartdb, model
from model import MachineCurrentState, MachineInterface, Machine, MachineStates, MachineDisk, Schedule, ScheduleRoles, \
LifecycleRolling
from repositories import user_interface_repo
class TestMachineStateRepo(unittest.TestCase):
    """Unit tests for user_interface_repo.UserInterfaceRepository.get_machines_overview().

    Each test seeds an in-memory SQLite database with one fixture machine plus
    an increasing set of related rows (disk, current state, schedule role,
    lifecycle rolling) and checks the overview entry produced for it.  The
    seeding and expected-dict construction, previously copy-pasted in every
    test, live in the _seed_machine / _expected helpers.
    """

    # Single fixture machine shared by all tests.
    MAC = "00:00:00:00:00:00"
    UUID = "b7f5f93a-b029-475f-b3a4-479ba198cb8a"

    @classmethod
    def setUpClass(cls):
        db_uri = 'sqlite:///:memory:'
        cls.smart = smartdb.SmartDatabaseClient(db_uri)

    def setUp(self):
        # Recreate the full schema before every test for isolation.
        model.BASE.metadata.drop_all(self.smart.get_engine_connection())
        model.BASE.metadata.create_all(self.smart.get_engine_connection())

    def _seed_machine(self, with_disk=False, with_state=False, role=None,
                      lifecycle_enable=None):
        """Insert the fixture machine with one boot interface.

        with_disk:        also add a 1 GiB /dev/sda MachineDisk
        with_state:       also add a MachineCurrentState in 'discovery'
        role:             when set, add a Schedule row with this role
        lifecycle_enable: when not None, add a LifecycleRolling(strategy="kexec")
                          with this enable flag
        """
        with self.smart.new_session() as session:
            machine = Machine(uuid=self.UUID)
            session.add(machine)
            machine_id = session.query(Machine).filter(Machine.uuid == self.UUID).first().id
            session.add(
                MachineInterface(machine_id=machine_id, mac=self.MAC, netmask=1,
                                 ipv4="10.10.10.10", cidrv4="127.0.0.1/8",
                                 as_boot=True, gateway="1.1.1.1", name="lol"))
            if with_disk:
                session.add(
                    MachineDisk(path="/dev/sda", size=1024 * 1024 * 1024,
                                machine_id=machine_id))
            if with_state:
                session.add(
                    MachineCurrentState(machine_id=machine_id, machine_mac=self.MAC,
                                        state_name=MachineStates.discovery))
            if role is not None:
                session.add(Schedule(machine_id=machine_id, role=role))
            if lifecycle_enable is not None:
                session.add(
                    LifecycleRolling(machine_id=machine_id, strategy="kexec",
                                     enable=lifecycle_enable))
            session.commit()

    def _expected(self, **overrides):
        """One-element overview list for the bare fixture machine, with
        `overrides` applied on top of the baseline entry."""
        entry = {
            'CIDR': '127.0.0.1/8',
            'LastReport': None,
            'UpdateStrategy': 'Disable',
            'LastChange': None,
            'MAC': self.MAC,
            'UpToDate': None,
            'FQDN': None,
            'DiskProfile': 'inMemory',
            'LastState': None,
            'Roles': ''}
        entry.update(overrides)
        return [entry]

    def _overview(self):
        """Fetch the machines overview through the repository under test."""
        ui = user_interface_repo.UserInterfaceRepository(self.smart)
        return ui.get_machines_overview()

    def test_empty(self):
        self.assertEqual([], self._overview())

    def test_one_machine_with_only_interfaces(self):
        self._seed_machine()
        self.assertCountEqual(self._expected(), self._overview())

    def test_one_machine_full(self):
        self._seed_machine(with_disk=True, with_state=True)
        self.assertCountEqual(
            self._expected(DiskProfile='S', LastState=MachineStates.discovery),
            self._overview())

    def test_one_machine_full_scheduled(self):
        self._seed_machine(with_disk=True, with_state=True,
                           role=ScheduleRoles.kubernetes_control_plane)
        self.assertCountEqual(
            self._expected(DiskProfile='S', LastState=MachineStates.discovery,
                           Roles=ScheduleRoles.kubernetes_control_plane),
            self._overview())

    def test_one_machine_full_scheduled_with_strategy(self):
        self._seed_machine(with_disk=True, with_state=True,
                           role=ScheduleRoles.kubernetes_control_plane,
                           lifecycle_enable=True)
        self.assertCountEqual(
            self._expected(DiskProfile='S', LastState=MachineStates.discovery,
                           Roles=ScheduleRoles.kubernetes_control_plane,
                           UpdateStrategy='kexec'),
            self._overview())

    def test_one_machine_full_scheduled_with_strategy_disable(self):
        # A disabled lifecycle strategy must leave UpdateStrategy at 'Disable'.
        self._seed_machine(with_disk=True, with_state=True,
                           role=ScheduleRoles.kubernetes_control_plane,
                           lifecycle_enable=False)
        self.assertCountEqual(
            self._expected(DiskProfile='S', LastState=MachineStates.discovery,
                           Roles=ScheduleRoles.kubernetes_control_plane),
            self._overview())
| 40.282407
| 120
| 0.551891
| 906
| 8,701
| 5.155629
| 0.129139
| 0.042817
| 0.051381
| 0.051381
| 0.896168
| 0.895312
| 0.882038
| 0.882038
| 0.841148
| 0.841148
| 0
| 0.069744
| 0.327663
| 8,701
| 215
| 121
| 40.469767
| 0.728718
| 0
| 0
| 0.747423
| 0
| 0
| 0.121825
| 0.020687
| 0
| 0
| 0
| 0
| 0.030928
| 1
| 0.041237
| false
| 0
| 0.020619
| 0
| 0.06701
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
090d033c32805518e72d418e271a085f71117830
| 4,318
|
py
|
Python
|
stats/unpickle.py
|
kchiang6997/toxicity
|
d6c4b1849c072b4eb7b492d1eb17b699b46510b2
|
[
"MIT"
] | null | null | null |
stats/unpickle.py
|
kchiang6997/toxicity
|
d6c4b1849c072b4eb7b492d1eb17b699b46510b2
|
[
"MIT"
] | null | null | null |
stats/unpickle.py
|
kchiang6997/toxicity
|
d6c4b1849c072b4eb7b492d1eb17b699b46510b2
|
[
"MIT"
] | null | null | null |
import pickle

# Each entry: (heading printed before the metric block, pickle filename prefix).
RUNS = [
    ("gru glove 150", "gru_glove_epoch_5_150d"),
    ("gru glove 50", "gru_glove_epoch_5"),
    ("lstm glove 150", "lstm_glove_epoch_5_150d"),
    ("lstm glove 50", "lstm_glove_epoch_5"),
    ("lstm word2vec 50", "lstm_word2vec_epoch_5_50d"),
]

# Each entry: (label printed before the data, pickle filename suffix).
METRICS = [
    ("f1", "f1s"),
    ("nbatch", "nbatch"),
    ("precision", "precisions"),
    ("recalls", "recalls"),
]

# Replaces twenty copy-pasted open/load/print stanzas with one nested loop;
# headings, labels, and file order are unchanged.
for heading, prefix in RUNS:
    print(heading)
    for label, suffix in METRICS:
        # The protocol version used is detected automatically, so we do not
        # have to specify it.
        with open(prefix + "_" + suffix + ".p", 'rb') as fh:
            data = pickle.load(fh)
        print(label)
        print(data)
| 29.575342
| 71
| 0.714914
| 714
| 4,318
| 4.194678
| 0.084034
| 0.053422
| 0.033389
| 0.146912
| 0.925876
| 0.917195
| 0.899499
| 0.810017
| 0.769282
| 0.723205
| 0
| 0.039529
| 0.173923
| 4,318
| 146
| 72
| 29.575342
| 0.800112
| 0.398101
| 0
| 0.465116
| 0
| 0
| 0.327472
| 0.238374
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011628
| 0
| 0.011628
| 0.523256
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
117eb18a9018e7da2bbb7e0836e8f8d5589974d1
| 2,005
|
py
|
Python
|
ixnetwork_restpy/pytest_tests/tests/multivalue_tests/test_multivalue_port_step.py
|
rfrye-github/ixnetwork_restpy
|
23eeb24b21568a23d3f31bbd72814ff55eb1af44
|
[
"MIT"
] | 20
|
2019-05-07T01:59:14.000Z
|
2022-02-11T05:24:47.000Z
|
ixnetwork_restpy/pytest_tests/tests/multivalue_tests/test_multivalue_port_step.py
|
rfrye-github/ixnetwork_restpy
|
23eeb24b21568a23d3f31bbd72814ff55eb1af44
|
[
"MIT"
] | 60
|
2019-04-03T18:59:35.000Z
|
2022-02-22T12:05:05.000Z
|
ixnetwork_restpy/pytest_tests/tests/multivalue_tests/test_multivalue_port_step.py
|
rfrye-github/ixnetwork_restpy
|
23eeb24b21568a23d3f31bbd72814ff55eb1af44
|
[
"MIT"
] | 13
|
2019-05-20T10:48:31.000Z
|
2021-10-06T07:45:44.000Z
|
def test_can_set_incremental_port_step_ipv4_address(vports):
vport_1, vport_2 = vports
ixnetwork = vport_1._parent
topo = ixnetwork.Topology.add(Vports=vports)
ipv4_1 = topo.DeviceGroup.add(Multiplier=1).\
Ethernet.add().\
Ipv4.add()
address_obj = ipv4_1.Address
address_obj.Increment(start_value='1.1.1.1',step_value='0.0.0.0')
# setting port step
address_obj.Steps.Step = '1.0.0.0'
assert address_obj.Values[1] == '2.1.1.1'
assert address_obj.Steps.Step == '1.0.0.0'
def test_can_disbale_port_step(vports):
vport_1, vport_2 = vports
ixnetwork = vport_1._parent
topo = ixnetwork.Topology.add(Vports=vports)
ipv4_1 = topo.DeviceGroup.add(Multiplier=1). \
Ethernet.add(). \
Ipv4.add()
address_obj = ipv4_1.Address
address_obj.Increment(start_value='1.1.1.1', step_value='0.0.0.0')
# setting port step
address_obj.Steps.Step = '1.0.0.0'
address_obj.Steps.Enabled = False
assert address_obj.Values[1] == '1.1.1.1'
assert address_obj.Steps.Enabled == False
def test_port_step_can_retrieve_owner(vports):
vport_1, vport_2 = vports
ixnetwork = vport_1._parent
topo = ixnetwork.Topology.add(Vports=vports)
ipv4_1 = topo.DeviceGroup.add(Multiplier=1). \
Ethernet.add(). \
Ipv4.add()
address_obj = ipv4_1.Address
address_obj.Increment(start_value='1.1.1.1', step_value='0.0.0.0')
# setting port step
owner = address_obj.Steps.Owner
assert owner.split('/')[-2] == 'topology'
def test_port_step_can_retrieve_description(vports):
    """Steps.Description identifies the multivalue step as the port step."""
    vport_1, _ = vports
    ixnetwork = vport_1._parent
    topology = ixnetwork.Topology.add(Vports=vports)
    ipv4 = topology.DeviceGroup.add(Multiplier=1).Ethernet.add().Ipv4.add()
    address = ipv4.Address
    address.Increment(start_value='1.1.1.1', step_value='0.0.0.0')
    # Compare case-insensitively; the server may capitalise the label.
    assert address.Steps.Description.lower() == 'port step'
| 35.803571
| 70
| 0.679302
| 303
| 2,005
| 4.264026
| 0.128713
| 0.027864
| 0.027864
| 0.018576
| 0.853715
| 0.787926
| 0.747678
| 0.722136
| 0.704334
| 0.704334
| 0
| 0.056373
| 0.186035
| 2,005
| 55
| 71
| 36.454545
| 0.735294
| 0.035411
| 0
| 0.723404
| 0
| 0
| 0.056535
| 0
| 0
| 0
| 0
| 0
| 0.12766
| 1
| 0.085106
| false
| 0
| 0
| 0
| 0.085106
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
11c85326ae839e561bde80ec0a4f18b116f9fbeb
| 274
|
py
|
Python
|
app/routes.py
|
hasan-haider/Anywhere-use-systematic-Flask-web-app
|
5a1401f385386e2f24fdcbf77d27e6bd0cb835b6
|
[
"MIT"
] | 1
|
2021-06-02T13:14:47.000Z
|
2021-06-02T13:14:47.000Z
|
app/routes.py
|
hasan-haider/Anywhere-use-systematic-Flask-web-app
|
5a1401f385386e2f24fdcbf77d27e6bd0cb835b6
|
[
"MIT"
] | null | null | null |
app/routes.py
|
hasan-haider/Anywhere-use-systematic-Flask-web-app
|
5a1401f385386e2f24fdcbf77d27e6bd0cb835b6
|
[
"MIT"
] | null | null | null |
from flask import render_template
from app import app,APP_ROOT
@app.route('/')
def home():
    """Render the landing page."""
    page_title = 'Home'
    return render_template('index.html', title=page_title)
@app.route('/about')
def about():
    """Render the about page, demonstrating a variable passed to the template."""
    context = {'title': 'About', 'name': 'Passed by variable'}
    return render_template('about.html', **context)
| 22.833333
| 81
| 0.689781
| 38
| 274
| 4.868421
| 0.5
| 0.227027
| 0.216216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153285
| 274
| 11
| 82
| 24.909091
| 0.797414
| 0
| 0
| 0
| 0
| 0
| 0.205323
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.125
| 0.25
| 0.25
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 1
| 0
|
0
| 7
|
11c873e702cbd937adcb0c2d59b137baee4cfadb
| 282
|
py
|
Python
|
keras/engine/saving.py
|
ikingye/keras
|
1a3ee8441933fc007be6b2beb47af67998d50737
|
[
"MIT"
] | 5
|
2020-11-30T22:26:03.000Z
|
2020-12-01T22:34:25.000Z
|
keras/engine/saving.py
|
ikingye/keras
|
1a3ee8441933fc007be6b2beb47af67998d50737
|
[
"MIT"
] | 10
|
2020-12-01T22:55:29.000Z
|
2020-12-11T18:31:46.000Z
|
keras/engine/saving.py
|
ikingye/keras
|
1a3ee8441933fc007be6b2beb47af67998d50737
|
[
"MIT"
] | 15
|
2020-11-30T22:12:22.000Z
|
2020-12-09T01:32:48.000Z
|
"""Model saving utilities."""
from tensorflow.keras.models import save_model
from tensorflow.keras.models import load_model
from tensorflow.keras.models import model_from_config
from tensorflow.keras.models import model_from_yaml
from tensorflow.keras.models import model_from_json
| 40.285714
| 53
| 0.858156
| 41
| 282
| 5.707317
| 0.317073
| 0.299145
| 0.405983
| 0.534188
| 0.820513
| 0.688034
| 0.512821
| 0
| 0
| 0
| 0
| 0
| 0.08156
| 282
| 6
| 54
| 47
| 0.903475
| 0.08156
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
eeced78ed9d32f212ff46af9da5611ab937bf207
| 3,937
|
py
|
Python
|
examples/src/Charts/AnimatingCategoriesElements.py
|
aspose-slides/Aspose.Slides-for-Python-via-.NET
|
c55ad5c71f942598f1e67e22a52cbcd1cb286467
|
[
"MIT"
] | null | null | null |
examples/src/Charts/AnimatingCategoriesElements.py
|
aspose-slides/Aspose.Slides-for-Python-via-.NET
|
c55ad5c71f942598f1e67e22a52cbcd1cb286467
|
[
"MIT"
] | null | null | null |
examples/src/Charts/AnimatingCategoriesElements.py
|
aspose-slides/Aspose.Slides-for-Python-via-.NET
|
c55ad5c71f942598f1e67e22a52cbcd1cb286467
|
[
"MIT"
] | null | null | null |
import aspose.slides as slides
def charts_animating_categories_elements():
    """Animate a chart's category elements in an existing presentation.

    Loads charts_existing_chart.pptx, fades the whole chart in, then reveals
    every element of each (series, category) pair one after another, and
    writes the result to the output directory.
    """
    #ExStart:AnimatingCategoriesElements
    # The path to the documents directory.
    dataDir = "./examples/data/"
    outDir = "./examples/out/"

    with slides.Presentation(dataDir + "charts_existing_chart.pptx") as presentation:
        # Get reference of the chart object
        slide = presentation.slides[0]
        chart = slide.shapes[0]
        sequence = slide.timeline.main_sequence
        anim = slides.animation

        # Fade in the chart as a whole before animating its parts.
        sequence.add_effect(chart, anim.EffectType.FADE, anim.EffectSubtype.NONE,
                            anim.EffectTriggerType.AFTER_PREVIOUS)

        # Animate categories' elements. The original listed all twelve
        # (series, category) pairs as separate calls; this iterates the same
        # 3 series x 4 categories in the same order.
        for series_index in range(3):
            for category_index in range(4):
                sequence.add_effect(
                    chart,
                    anim.EffectChartMinorGroupingType.BY_ELEMENT_IN_CATEGORY,
                    series_index, category_index,
                    anim.EffectType.APPEAR,
                    anim.EffectSubtype.NONE,
                    anim.EffectTriggerType.AFTER_PREVIOUS)

        # Write the presentation file to disk
        presentation.save(outDir + "charts_animating_categories_elements_out.pptx",
                          slides.export.SaveFormat.PPTX)
    #ExEnd:AnimatingCategoriesElements
| 106.405405
| 254
| 0.824232
| 443
| 3,937
| 7.137698
| 0.14447
| 0.241935
| 0.069892
| 0.102783
| 0.842188
| 0.842188
| 0.842188
| 0.842188
| 0.842188
| 0.825111
| 0
| 0.007232
| 0.086868
| 3,937
| 36
| 255
| 109.361111
| 0.872045
| 0.051816
| 0
| 0
| 0
| 0
| 0.027383
| 0.01906
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.045455
| null | null | 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
0133dcece68ca3ef564423f87b1e620c4c628136
| 150
|
py
|
Python
|
dj_tasks/apps.py
|
cfc603/dj-tasks
|
a28a528844351e7cfc6e89d4b5d7cbf8d0df9dc8
|
[
"MIT"
] | null | null | null |
dj_tasks/apps.py
|
cfc603/dj-tasks
|
a28a528844351e7cfc6e89d4b5d7cbf8d0df9dc8
|
[
"MIT"
] | 3
|
2020-10-08T18:37:38.000Z
|
2020-12-29T17:31:43.000Z
|
dj_tasks/apps.py
|
cfc603/dj-tasks
|
a28a528844351e7cfc6e89d4b5d7cbf8d0df9dc8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8
from django.apps import AppConfig # pragma: no cover
class DjTasksConfig(AppConfig):  # pragma: no cover
    """Django AppConfig registering this package under the label 'dj_tasks'."""
    # App label used by Django's app registry and migrations.
    name = 'dj_tasks'
| 21.428571
| 53
| 0.693333
| 20
| 150
| 5.15
| 0.8
| 0.291262
| 0.330097
| 0.427184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008264
| 0.193333
| 150
| 6
| 54
| 25
| 0.842975
| 0.34
| 0
| 0
| 0
| 0
| 0.084211
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
09a89fd6f12c778cbd68e7e058c9a94ee490d856
| 155
|
py
|
Python
|
wk/basic/__init__.py
|
Peiiii/wk
|
dcf948c1cb36c1eec9b2a554ea0296c6d3dbbdc4
|
[
"MIT"
] | null | null | null |
wk/basic/__init__.py
|
Peiiii/wk
|
dcf948c1cb36c1eec9b2a554ea0296c6d3dbbdc4
|
[
"MIT"
] | null | null | null |
wk/basic/__init__.py
|
Peiiii/wk
|
dcf948c1cb36c1eec9b2a554ea0296c6d3dbbdc4
|
[
"MIT"
] | null | null | null |
from .types import *
from .other_utils import *
from .math_utils import *
from .string_utils import *
from .time_utils import *
from . import string_utils
| 22.142857
| 27
| 0.774194
| 23
| 155
| 5
| 0.347826
| 0.434783
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154839
| 155
| 6
| 28
| 25.833333
| 0.877863
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
09f21b35cc3b21d95e95439876f407d53d00ea4a
| 340
|
py
|
Python
|
examples/ansible-lint-s2i/app.py
|
godleon/learning_openshift
|
e9d7611d674b21667cc9fb2b82fb455fc42d5c3a
|
[
"MIT"
] | null | null | null |
examples/ansible-lint-s2i/app.py
|
godleon/learning_openshift
|
e9d7611d674b21667cc9fb2b82fb455fc42d5c3a
|
[
"MIT"
] | null | null | null |
examples/ansible-lint-s2i/app.py
|
godleon/learning_openshift
|
e9d7611d674b21667cc9fb2b82fb455fc42d5c3a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
"""Demo workload for the ansible-lint s2i example.

Prints a heartbeat message five times, sleeping 10 seconds between prints
(same observable behavior as the original repeated statements).
"""
import time

MESSAGE = "Hello Ansible Lint (this is in app.py)"

print(MESSAGE)
# Original repeated print/sleep pairs verbatim; a loop keeps the exact
# sequence (print, then 4 x [sleep, print]) without the duplication.
for _ in range(4):
    time.sleep(10)
    print(MESSAGE)
| 16.190476
| 47
| 0.697059
| 62
| 340
| 3.822581
| 0.258065
| 0.21097
| 0.35865
| 0.443038
| 0.902954
| 0.902954
| 0.902954
| 0.902954
| 0.902954
| 0.902954
| 0
| 0.031142
| 0.15
| 340
| 21
| 48
| 16.190476
| 0.788927
| 0.05
| 0
| 0.9
| 0
| 0
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.1
| 0
| 0.1
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 12
|
61d8334c204cdd288efed64c43d331b1b43cccbc
| 11,898
|
py
|
Python
|
engine/collision/tests/test_collision_resolution_physical.py
|
codehearts/pickles-fetch-quest
|
ca9b3c7fe26acb50e1e2d654d068f5bb953bc427
|
[
"MIT"
] | 3
|
2017-12-07T19:17:36.000Z
|
2021-07-29T18:24:25.000Z
|
engine/collision/tests/test_collision_resolution_physical.py
|
codehearts/pickles-fetch-quest
|
ca9b3c7fe26acb50e1e2d654d068f5bb953bc427
|
[
"MIT"
] | 41
|
2017-11-11T06:00:08.000Z
|
2022-03-28T23:27:25.000Z
|
engine/collision/tests/test_collision_resolution_physical.py
|
codehearts/pickles-fetch-quest
|
ca9b3c7fe26acb50e1e2d654d068f5bb953bc427
|
[
"MIT"
] | 2
|
2018-08-31T23:49:00.000Z
|
2021-09-21T00:42:48.000Z
|
from ..collision_resolution_physical import resolve_physical_collision
from ..collision_resolution_physical import resolve_game_object_x_collision
from ..collision_resolution_physical import resolve_game_object_y_collision
from unittest.mock import Mock, patch
import unittest
class TestResolvePhysicalCollision(unittest.TestCase):
    """Test functionality of resolve_physical_collision function."""
    # Dotted path patched below so the resolver's collaborators (2d overlap
    # detection and per-axis resolution) can be replaced with mocks.
    collision_2d_module = 'engine.collision.collision_resolution_physical'
    @patch(collision_2d_module + '.resolve_game_object_y_collision')
    @patch(collision_2d_module + '.resolve_game_object_x_collision')
    @patch('engine.geometry.detect_overlap_2d')
    def test_no_overlap(self, mock_2d_detect, mock_x_resolve, mock_y_resolve):
        """No work is performed if neither object overlaps."""
        first = Mock(velocity=Mock())
        second = Mock(velocity=Mock())
        mock_2d_detect.return_value = False
        # (0, 0): nothing was resolved on either axis.
        self.assertEqual((0, 0), resolve_physical_collision(first, second))
        mock_2d_detect.assert_called_once_with(first, second)
        mock_x_resolve.assert_not_called()
        mock_y_resolve.assert_not_called()
        first.set_position.assert_not_called()
        second.set_position.assert_not_called()
    @patch(collision_2d_module + '.resolve_game_object_y_collision')
    @patch(collision_2d_module + '.resolve_game_object_x_collision')
    @patch('engine.geometry.detect_overlap_2d')
    def test_first_is_lighter(self, mock_2d_detect, mock_x_resolve,
                              mock_y_resolve):
        """First object moves when mass is less than second's."""
        first = Mock(x=1, y=2, width=3, height=4,
                     mass=1, velocity=Mock(x=1, y=2))
        second = Mock(x=1, y=2, width=3, height=4,
                      mass=2, velocity=Mock(x=1, y=2))
        mock_2d_detect.return_value = True
        mock_x_resolve.return_value = 1
        mock_y_resolve.return_value = 2
        self.assertEqual((1, 2), resolve_physical_collision(first, second))
        mock_2d_detect.assert_called_once()
        # Lighter object is passed first, i.e. it is the one that moves.
        mock_x_resolve.assert_called_once_with(first, second)
        mock_y_resolve.assert_called_once_with(first, second)
        first.set_position.assert_not_called()
        second.set_position.assert_not_called()
    @patch(collision_2d_module + '.resolve_game_object_y_collision')
    @patch(collision_2d_module + '.resolve_game_object_x_collision')
    @patch('engine.geometry.detect_overlap_2d')
    def test_first_has_equal_mass(self, mock_2d_detect, mock_x_resolve,
                                  mock_y_resolve):
        """First object moves when mass is equal to second's."""
        first = Mock(x=1, y=2, width=3, height=4,
                     mass=1, velocity=Mock(x=-1, y=2))
        second = Mock(x=1, y=2, width=3, height=4,
                      mass=1, velocity=Mock(x=1, y=2))
        mock_2d_detect.return_value = True
        mock_x_resolve.return_value = 1
        mock_y_resolve.return_value = 2
        self.assertEqual((1, 2), resolve_physical_collision(first, second))
        mock_2d_detect.assert_called_once()
        # Tie on mass: the first argument is still the one that moves.
        mock_x_resolve.assert_called_once_with(first, second)
        mock_y_resolve.assert_called_once_with(first, second)
        first.set_position.assert_not_called()
        second.set_position.assert_not_called()
    @patch(collision_2d_module + '.resolve_game_object_y_collision')
    @patch(collision_2d_module + '.resolve_game_object_x_collision')
    @patch('engine.geometry.detect_overlap_2d')
    def test_first_is_heavier(self, mock_2d_detect, mock_x_resolve,
                              mock_y_resolve):
        """Second object moves when mass is less than first's."""
        first = Mock(x=1, y=2, width=3, height=4,
                     mass=2, velocity=Mock(x=-1, y=-2))
        second = Mock(x=1, y=2, width=3, height=4,
                      mass=1, velocity=Mock(x=1, y=2))
        mock_2d_detect.return_value = True
        mock_x_resolve.return_value = 1
        mock_y_resolve.return_value = 2
        self.assertEqual((1, 2), resolve_physical_collision(first, second))
        mock_2d_detect.assert_called_once()
        # Arguments are swapped: the lighter `second` object is moved.
        mock_x_resolve.assert_called_once_with(second, first)
        mock_y_resolve.assert_called_once_with(second, first)
        first.set_position.assert_not_called()
        second.set_position.assert_not_called()
    @patch(collision_2d_module + '.get_nonoverlapping_coordinate_1d')
    @patch('engine.geometry.detect_overlap_1d')
    def test_velocity_is_reset_along_x(self, mock_1d_detect, mock_1d_resolve):
        """X velocity is cancelled when moved along x axis."""
        moving = Mock(
            x=1, y=2, width=3, height=4,
            velocity=Mock(x=10, y=20), acceleration=Mock(x=10, y=20))
        resting = Mock(
            x=1, y=2, width=3, height=4,
            velocity=Mock(x=10, y=20), acceleration=Mock(x=10, y=20))
        # Resolve the objects along the x axis
        mock_1d_detect.return_value = True
        mock_1d_resolve.return_value = 3
        self.assertEqual(10, resolve_game_object_x_collision(moving, resting))
        # Resolution was performed with correct dimensions
        # NOTE(review): -18 presumably the velocity-swept x interval start
        # (x - velocity.x - width + ...?) — confirm against the implementation.
        mock_1d_detect.assert_called_once_with(-18, 4, 2, 4)
        mock_1d_resolve.assert_called_once_with(1, 3, 10, 1, 3)
        # Only the x velocity of the moving object was cancelled
        self.assertEqual(
            (0, 20),
            (moving.velocity.x, moving.velocity.y),
            "Only moving x velocity should have been reset")
        self.assertEqual(
            (0, 20),
            (moving.acceleration.x, moving.acceleration.y),
            "Only moving x acceleration should have been reset")
        # No velocity on the resting object was cancelled
        self.assertEqual(
            (10, 20),
            (resting.velocity.x, resting.velocity.y),
            "Resting velocities should not have been reset")
        self.assertEqual(
            (10, 20),
            (resting.acceleration.x, resting.acceleration.y),
            "Resting accelerations should not have been reset")
    @patch(collision_2d_module + '.get_nonoverlapping_coordinate_1d')
    @patch('engine.geometry.detect_overlap_1d')
    def test_velocity_is_reset_along_y(self, mock_1d_detect, mock_1d_resolve):
        """Y velocity is cancelled when moved along y axis."""
        moving = Mock(
            x=1, y=2, width=3, height=4,
            velocity=Mock(x=10, y=20), acceleration=Mock(x=10, y=20))
        resting = Mock(
            x=1, y=2, width=3, height=4,
            velocity=Mock(x=10, y=20), acceleration=Mock(x=10, y=20))
        # Resolve the objects along the y axis
        mock_1d_detect.return_value = True
        mock_1d_resolve.return_value = 4
        self.assertEqual(20, resolve_game_object_y_collision(moving, resting))
        # Resolution was performed with correct dimensions
        mock_1d_detect.assert_called_once_with(1, 3, 1, 3)
        mock_1d_resolve.assert_called_once_with(2, 4, 20, 2, 4)
        # Only the y velocity of the moving object was cancelled
        self.assertEqual(
            (10, 0),
            (moving.velocity.x, moving.velocity.y),
            "Only moving y velocity should have been reset")
        self.assertEqual(
            (10, 0),
            (moving.acceleration.x, moving.acceleration.y),
            "Only moving y acceleration should have been reset")
        # No velocity on the resting object was cancelled
        self.assertEqual(
            (10, 20),
            (resting.velocity.x, resting.velocity.y),
            "Resting velocities should not have been reset")
        self.assertEqual(
            (10, 20),
            (resting.acceleration.x, resting.acceleration.y),
            "Resting accelerations should not have been reset")
    @patch(collision_2d_module + '.get_nonoverlapping_coordinate_1d')
    @patch('engine.geometry.detect_overlap_1d')
    def test_resolve_x_axis_no_overlap(self, mock_1d_detect, mock_1d_resolve):
        """Resolving x axis does nothing if no overlap exists."""
        moving = Mock(x=1, y=2, width=3, height=4, velocity=Mock(y=5))
        resting = Mock(x=10, y=20, width=30, height=40)
        # Resolve the objects along the x axis
        mock_1d_detect.return_value = False
        self.assertEqual(0, resolve_game_object_x_collision(moving, resting))
        # NOTE(review): -3 presumably the velocity-swept x start — confirm.
        mock_1d_detect.assert_called_once_with(-3, 4, 20, 40)
        mock_1d_resolve.assert_not_called()
        # Moving and resting objects were not repositioned
        self.assertEqual(
            (1, 2, 3, 4),
            (moving.x, moving.y, moving.width, moving.height))
        self.assertEqual(
            (10, 20, 30, 40),
            (resting.x, resting.y, resting.width, resting.height))
    @patch(collision_2d_module + '.get_nonoverlapping_coordinate_1d')
    @patch('engine.geometry.detect_overlap_1d')
    def test_object_is_moved_along_x(self, mock_1d_detect, mock_1d_resolve):
        """X coordinate is changed when moved along x axis."""
        moving = Mock(x=1, y=2, width=3, height=4, velocity=Mock(x=5, y=6))
        resting = Mock(x=1, y=2, width=3, height=4)
        # Resolve the objects along the x axis
        mock_1d_detect.return_value = True
        mock_1d_resolve.return_value = -2
        self.assertEqual(5, resolve_game_object_x_collision(moving, resting))
        # Resolution was performed with correct dimensions
        mock_1d_detect.assert_called_once_with(-4, 4, 2, 4)
        mock_1d_resolve.assert_called_once_with(1, 3, 5, 1, 3)
        # Only the moving object's x coordinate was repositioned
        self.assertEqual(
            (-2, 2, 3, 4),
            (moving.x, moving.y, moving.width, moving.height))
        self.assertEqual(
            (1, 2, 3, 4),
            (resting.x, resting.y, resting.width, resting.height))
    @patch(collision_2d_module + '.get_nonoverlapping_coordinate_1d')
    @patch('engine.geometry.detect_overlap_1d')
    def test_resolve_y_axis_no_overlap(self, mock_1d_detect, mock_1d_resolve):
        """Resolving y axis does nothing if no overlap exists."""
        moving = Mock(x=1, y=2, width=3, height=4)
        resting = Mock(x=10, y=20, width=30, height=40)
        # Resolve the objects along the y axis
        mock_1d_detect.return_value = False
        self.assertEqual(0, resolve_game_object_y_collision(moving, resting))
        mock_1d_detect.assert_called_once_with(1, 3, 10, 30)
        mock_1d_resolve.assert_not_called()
        # Moving and resting objects were not repositioned
        self.assertEqual(
            (1, 2, 3, 4),
            (moving.x, moving.y, moving.width, moving.height))
        self.assertEqual(
            (10, 20, 30, 40),
            (resting.x, resting.y, resting.width, resting.height))
    @patch(collision_2d_module + '.get_nonoverlapping_coordinate_1d')
    @patch('engine.geometry.detect_overlap_1d')
    def test_object_is_moved_along_y(self, mock_1d_detect, mock_1d_resolve):
        """Y coordinate is changed when moved along y axis."""
        moving = Mock(x=1, y=2, width=3, height=4, velocity=Mock(y=5))
        resting = Mock(x=1, y=2, width=3, height=4)
        # Resolve the objects along the y axis
        mock_1d_detect.return_value = True
        mock_1d_resolve.return_value = -2
        self.assertEqual(5, resolve_game_object_y_collision(moving, resting))
        # Resolution was performed with correct dimensions
        mock_1d_detect.assert_called_once_with(1, 3, 1, 3)
        mock_1d_resolve.assert_called_once_with(2, 4, 5, 2, 4)
        # Only the moving object's y coordinate was repositioned
        self.assertEqual(
            (1, -2, 3, 4),
            (moving.x, moving.y, moving.width, moving.height))
        self.assertEqual(
            (1, 2, 3, 4),
            (resting.x, resting.y, resting.width, resting.height))
| 43.742647
| 78
| 0.657842
| 1,638
| 11,898
| 4.510379
| 0.065324
| 0.029778
| 0.017867
| 0.020845
| 0.933947
| 0.917975
| 0.901191
| 0.868977
| 0.845154
| 0.813752
| 0
| 0.039372
| 0.24004
| 11,898
| 271
| 79
| 43.904059
| 0.777704
| 0.117247
| 0
| 0.729592
| 0
| 0
| 0.115436
| 0.079578
| 0
| 0
| 0
| 0
| 0.295918
| 1
| 0.05102
| false
| 0
| 0.02551
| 0
| 0.086735
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
61e0d29cf64b3ed289f933b8084a9d941e32877c
| 3,780
|
py
|
Python
|
tests/unit/server_types/conftest.py
|
mascheck/hcloud-python
|
ffbfd32418c191676ce7f03a5e384bb123eb904a
|
[
"MIT"
] | 2
|
2020-12-11T18:09:44.000Z
|
2020-12-12T05:53:22.000Z
|
tests/unit/server_types/conftest.py
|
mascheck/hcloud-python
|
ffbfd32418c191676ce7f03a5e384bb123eb904a
|
[
"MIT"
] | null | null | null |
tests/unit/server_types/conftest.py
|
mascheck/hcloud-python
|
ffbfd32418c191676ce7f03a5e384bb123eb904a
|
[
"MIT"
] | 1
|
2019-06-19T17:53:10.000Z
|
2019-06-19T17:53:10.000Z
|
import pytest
@pytest.fixture()
def server_type_response():
    """Canned API payload for a single server_type (cx11)."""
    fsn1_price = {
        "location": "fsn1",
        "price_hourly": {"net": "1.0000000000", "gross": "1.1900000000000000"},
        "price_monthly": {"net": "1.0000000000", "gross": "1.1900000000000000"},
    }
    return {
        "server_type": {
            "id": 1,
            "name": "cx11",
            "description": "CX11",
            "cores": 1,
            "memory": 1,
            "disk": 25,
            "prices": [fsn1_price],
            "storage_type": "local",
            "cpu_type": "shared",
        }
    }
@pytest.fixture()
def two_server_types_response():
    """Canned API payload listing two server types (cx11 and cx21)."""
    def price(location, net_hourly, gross_hourly, net_monthly, gross_monthly):
        # One entry of a server type's "prices" list.
        return {
            "location": location,
            "price_hourly": {"net": net_hourly, "gross": gross_hourly},
            "price_monthly": {"net": net_monthly, "gross": gross_monthly},
        }

    cx11 = {
        "id": 1,
        "name": "cx11",
        "description": "CX11",
        "cores": 1,
        "memory": 1,
        "disk": 25,
        "prices": [
            price("fsn1", "1.0000000000", "1.1900000000000000",
                  "1.0000000000", "1.1900000000000000"),
        ],
        "storage_type": "local",
        "cpu_type": "shared",
    }
    cx21 = {
        "id": 2,
        "name": "cx21",
        "description": "CX21",
        "cores": 2,
        "memory": 4.0,
        "disk": 40,
        "prices": [
            price("fsn1", "0.0080000000", "0.0095200000000000",
                  "4.9000000000", "5.8310000000000000"),
            price("nbg1", "0.0080000000", "0.0095200000000000",
                  "4.9000000000", "5.8310000000000000"),
        ],
        "storage_type": "local",
        "cpu_type": "shared",
    }
    return {"server_types": [cx11, cx21]}
@pytest.fixture()
def one_server_types_response():
    """Canned API payload listing a single server type (cx11)."""
    fsn1_price = {
        "location": "fsn1",
        "price_hourly": {"net": "1.0000000000", "gross": "1.1900000000000000"},
        "price_monthly": {"net": "1.0000000000", "gross": "1.1900000000000000"},
    }
    cx11 = {
        "id": 1,
        "name": "cx11",
        "description": "CX11",
        "cores": 1,
        "memory": 1,
        "disk": 25,
        "prices": [fsn1_price],
        "storage_type": "local",
        "cpu_type": "shared",
    }
    return {"server_types": [cx11]}
| 29.76378
| 57
| 0.27963
| 198
| 3,780
| 5.191919
| 0.227273
| 0.023346
| 0.081712
| 0.110895
| 0.877432
| 0.877432
| 0.859922
| 0.859922
| 0.859922
| 0.800584
| 0
| 0.212822
| 0.599735
| 3,780
| 126
| 58
| 30
| 0.466623
| 0
| 0
| 0.633333
| 0
| 0
| 0.240212
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025
| true
| 0
| 0.008333
| 0.025
| 0.058333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
115a37669beea36d260aefca6ab68734b7659f9d
| 164
|
py
|
Python
|
py_zipkin/__init__.py
|
datree-demo/py_zipkin
|
0005ee2f0b00671fa08599d47c485e444eeb7beb
|
[
"Apache-2.0"
] | null | null | null |
py_zipkin/__init__.py
|
datree-demo/py_zipkin
|
0005ee2f0b00671fa08599d47c485e444eeb7beb
|
[
"Apache-2.0"
] | null | null | null |
py_zipkin/__init__.py
|
datree-demo/py_zipkin
|
0005ee2f0b00671fa08599d47c485e444eeb7beb
|
[
"Apache-2.0"
] | null | null | null |
# Export useful functions and types from private modules.
from py_zipkin.encoding._types import Encoding # noqa
from py_zipkin.encoding._types import Kind # noqa
| 41
| 57
| 0.810976
| 24
| 164
| 5.375
| 0.583333
| 0.093023
| 0.186047
| 0.310078
| 0.48062
| 0.48062
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140244
| 164
| 3
| 58
| 54.666667
| 0.914894
| 0.396341
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3a01b4c267c3c8bd73dd26e90092975b73c148af
| 4,191
|
py
|
Python
|
web/transiq/broker/migrations/0011_auto_20180619_1643.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
web/transiq/broker/migrations/0011_auto_20180619_1643.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | 14
|
2020-06-05T23:06:45.000Z
|
2022-03-12T00:00:18.000Z
|
web/transiq/broker/migrations/0011_auto_20180619_1643.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.0.5 on 2018-06-19 16:43
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add audit fields: created_by/changed_by user FKs on every broker-app
    model, plus an auto-updating updated_on timestamp on Broker."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('broker', '0010_auto_20180618_1151'),
    ]

    # All twelve audit FKs share the same shape; generate them from
    # (model_name, related_name prefix) pairs instead of repeating the
    # AddField literal. Only names from module scope are used inside the
    # comprehension (class-scope names are not visible there).
    operations = [
        migrations.AddField(
            model_name=model_name,
            name=field_name,
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name=prefix + '_' + field_name,
                to=settings.AUTH_USER_MODEL,
            ),
        )
        for model_name, prefix in (
            ('broker', 'broker'),
            ('brokeraccount', 'broker_account'),
            ('brokerdriver', 'broker_driver'),
            ('brokerowner', 'broker_owner'),
            ('brokervehicle', 'broker_vehicle'),
            ('document', 'document'),
        )
        for field_name in ('changed_by', 'created_by')
    ]
    # The generated file placed broker.updated_on between broker's two FKs
    # and brokeraccount's; insert at index 2 to keep the original order.
    operations.insert(2, migrations.AddField(
        model_name='broker',
        name='updated_on',
        field=models.DateTimeField(auto_now=True),
    ))
| 45.064516
| 107
| 0.593892
| 428
| 4,191
| 5.551402
| 0.135514
| 0.047138
| 0.076599
| 0.12037
| 0.874158
| 0.874158
| 0.837963
| 0.824495
| 0.824495
| 0.824495
| 0
| 0.010638
| 0.304701
| 4,191
| 92
| 108
| 45.554348
| 0.804736
| 0.010737
| 0
| 0.724138
| 1
| 0
| 0.134411
| 0.052365
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.034483
| 0
| 0.068966
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3a0de365e995e0f00dd5abc440ac708f5cd5c89e
| 6,142
|
py
|
Python
|
rlogbook/computing/models.py
|
tamasgal/rlogbook
|
c00e2fd0e9afd09d6af80566731356019fee5f36
|
[
"MIT"
] | null | null | null |
rlogbook/computing/models.py
|
tamasgal/rlogbook
|
c00e2fd0e9afd09d6af80566731356019fee5f36
|
[
"MIT"
] | null | null | null |
rlogbook/computing/models.py
|
tamasgal/rlogbook
|
c00e2fd0e9afd09d6af80566731356019fee5f36
|
[
"MIT"
] | null | null | null |
from django.db import models
class OperatingSystem(models.Model):
name = models.CharField(max_length=200)
def __unicode__(self):
return self.name
class Meta:
ordering = ('name', )
class IPPolicy(models.Model):
name = models.CharField(max_length=200)
def __unicode__(self):
return self.name
class Meta:
ordering = ('name', )
class Sector(models.Model):
name = models.CharField(max_length=200)
from_ip = models.GenericIPAddressField()
to_ip = models.GenericIPAddressField()
def __unicode__(self):
return self.name
class Meta:
ordering = ('name', )
class Subnet(models.Model):
    """Named IP sub-range, optionally grouped under a Sector."""
    name = models.CharField(max_length=200)
    from_ip = models.GenericIPAddressField()
    to_ip = models.GenericIPAddressField()
    # No on_delete argument — NOTE(review): implies Django < 2.0; confirm.
    sector = models.ForeignKey(Sector, null=True)

    def __unicode__(self):
        # Display as "[sector] name" so subnets sort/read grouped by sector.
        return u"[{0}] {1}".format(self.sector, self.name)

    class Meta:
        ordering = ('sector', 'name')
class ComputerType(models.Model):
    """Lookup table of computer types (referenced by Computer.computer_type)."""
    name = models.CharField(max_length=200)

    def __unicode__(self):
        return self.name

    class Meta:
        ordering = ('name', )
class CommonComputerInfo(models.Model):
    """Abstract base class; currently defines no shared fields (placeholder)."""
    pass

    class Meta:
        abstract = True
class Warranty(models.Model):
    """Warranty product with an optional duration."""
    name = models.CharField(max_length=200)
    # Duration in months; optional.
    warranty_length = models.PositiveSmallIntegerField(null=True, blank=True)

    def __unicode__(self):
        # Append the duration only when it is set (truthy: non-None, non-zero).
        if self.warranty_length:
            return "{0} ({1} months)".format(self.name, self.warranty_length)
        return self.name

    class Meta:
        ordering = ('name', )
class RRZELicense(models.Model):
    """Software license record with its order number (M2M target of Computer)."""
    name = models.CharField(max_length=200)
    order_nr = models.CharField(max_length=200)

    def __unicode__(self):
        return self.name

    class Meta:
        ordering = ('name', 'order_nr')
class Printer(models.Model):
    """Inventory record for a printer: hardware identity, network data,
    location/ownership, and warranty information. Nearly all fields are
    optional so records can be filled in incrementally.
    """
    manufacturer = models.CharField(max_length=200)
    model = models.CharField(max_length=200)
    # Stored as a single character code; see choices below.
    PRINTER_TYPE_CHOICES = (
        ('0', 'laser'),
        ('1', 'ink-jet'),
        ('2', 'dot-matrix'),
        ('3', 'lcd/led'),
        ('4', 'thermal'),
    )
    printer_type = models.CharField(max_length=1, choices=PRINTER_TYPE_CHOICES)
    color = models.BooleanField(default=True)
    name = models.CharField(max_length=200, null=True, blank=True)
    inventory_number = models.CharField(max_length=200, null=True, blank=True)
    serial_number = models.CharField(max_length=200, null=True, blank=True)
    # max_length=17 fits the colon-separated MAC form "aa:bb:cc:dd:ee:ff".
    mac_address = models.CharField(max_length=17, null=True, blank=True)
    subnet = models.ForeignKey(Subnet, null=True, blank=True)
    dns_cname = models.CharField(max_length=200, null=True, blank=True)
    dns_hinfo_computer = models.CharField(max_length=200, null=True, blank=True)
    ip_policy = models.ForeignKey(IPPolicy, null=True, blank=True)
    hostname = models.CharField(max_length=200, null=True, blank=True)
    # Lazy string references to the "facility" app's models.
    user = models.ForeignKey('facility.user', null=True, blank=True)
    room = models.ForeignKey('facility.room', null=True, blank=True)
    purpose = models.TextField(null=True, blank=True)
    prior_purpose = models.TextField(null=True, blank=True)
    comment = models.TextField(null=True, blank=True)
    ip = models.GenericIPAddressField(null=True, blank=True)
    repair_log = models.TextField(null=True, blank=True)
    # Warranty and purchase information
    warranty = models.ForeignKey(Warranty, null=True, blank=True)
    purchase_date = models.DateField('purchase date', null=True, blank=True)

    def __unicode__(self):
        # Fall back through name -> hostname -> generic label.
        return self.name or self.hostname or 'Printer'
class Computer(models.Model):
    """Inventory record for a computer: identity, network data, OS/software,
    hardware, warranty/purchase details, and Apple-specific extras. Nearly
    all fields are optional so records can be filled in incrementally.
    """
    inventory_number = models.CharField(max_length=200, null=True, blank=True)
    serial_number = models.CharField(max_length=200, null=True, blank=True)
    # max_length=17 fits the colon-separated MAC form "aa:bb:cc:dd:ee:ff".
    mac_address = models.CharField(max_length=17, null=True, blank=True)
    subnet = models.ForeignKey(Subnet, null=True, blank=True)
    dns_cname = models.CharField(max_length=200, null=True, blank=True)
    dns_hinfo_computer = models.CharField(max_length=200, null=True, blank=True)
    ip_policy = models.ForeignKey(IPPolicy, null=True, blank=True)
    os = models.ForeignKey(OperatingSystem, null=True, blank=True)
    expiration_date = models.DateTimeField('expiration date', null=True, blank=True)
    software_licenses = models.CharField(max_length=200, null=True, blank=True)
    name = models.CharField(max_length=200, null=True, blank=True)
    computer_type = models.ForeignKey(ComputerType, null=True, blank=True)
    hostname = models.CharField(max_length=200, null=True, blank=True)
    # Lazy string references to the "facility" app's models.
    user = models.ForeignKey('facility.user', null=True, blank=True)
    room = models.ForeignKey('facility.room', null=True, blank=True)
    purpose = models.TextField(null=True, blank=True)
    prior_purpose = models.TextField(null=True, blank=True)
    comment = models.TextField(null=True, blank=True)
    ip = models.GenericIPAddressField(null=True, blank=True)
    repair_log = models.TextField(null=True, blank=True)
    # Hardware
    ram = models.PositiveSmallIntegerField(null=True, blank=True)
    # Software
    standard_software = models.CharField(max_length=200, null=True, blank=True)
    additional_software = models.CharField(max_length=200, null=True, blank=True)
    rrze_licenses = models.ManyToManyField(RRZELicense, blank=True)
    # Warranty and purchase information
    warranty = models.ForeignKey(Warranty, null=True, blank=True)
    purchase_date = models.DateField('purchase date', null=True, blank=True)
    bill = models.CharField(max_length=200, null=True, blank=True)
    # Apple specific
    mac_airport = models.CharField(max_length=17, null=True, blank=True)
    mac_bluetooth = models.CharField(max_length=17, null=True, blank=True)
    model_year = models.CharField(max_length=50, null=True, blank=True)
    part_no = models.CharField(max_length=50, null=True, blank=True)
    netrestore_image = models.CharField(max_length=200, null=True, blank=True)
    #todo = models.TextField()

    def __unicode__(self):
        # Fall back through name -> hostname -> generic label.
        return self.name or self.hostname or 'Computer'
| 33.933702
| 84
| 0.703191
| 774
| 6,142
| 5.431525
| 0.144703
| 0.09705
| 0.154615
| 0.202188
| 0.800428
| 0.766413
| 0.747621
| 0.740247
| 0.710752
| 0.639391
| 0
| 0.020352
| 0.176001
| 6,142
| 180
| 85
| 34.122222
| 0.810314
| 0.020352
| 0
| 0.609756
| 0
| 0
| 0.035952
| 0
| 0
| 0
| 0
| 0.005556
| 0
| 1
| 0.073171
| false
| 0.00813
| 0.00813
| 0.065041
| 0.869919
| 0.00813
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
3a33038e7041009a7fe654e91c569d5139f7970a
| 48
|
py
|
Python
|
joe_username/__init__.py
|
shreyas44/joe-username
|
add4ef5d44948401e2dd096ff9a73df456548c44
|
[
"MIT"
] | null | null | null |
joe_username/__init__.py
|
shreyas44/joe-username
|
add4ef5d44948401e2dd096ff9a73df456548c44
|
[
"MIT"
] | null | null | null |
joe_username/__init__.py
|
shreyas44/joe-username
|
add4ef5d44948401e2dd096ff9a73df456548c44
|
[
"MIT"
] | null | null | null |
from .generate import generate, JOES_DICTIONARY
| 24
| 47
| 0.854167
| 6
| 48
| 6.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 48
| 1
| 48
| 48
| 0.930233
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
28e259911cc3b8bc01a7e8136b0fd5ed6faeebbb
| 9,397
|
py
|
Python
|
zadanie/articles/tests/test_urls.py
|
szymanskirafal/zadanie
|
8668976955eeea45e4abec673ba5a6c4f3848e16
|
[
"MIT"
] | null | null | null |
zadanie/articles/tests/test_urls.py
|
szymanskirafal/zadanie
|
8668976955eeea45e4abec673ba5a6c4f3848e16
|
[
"MIT"
] | null | null | null |
zadanie/articles/tests/test_urls.py
|
szymanskirafal/zadanie
|
8668976955eeea45e4abec673ba5a6c4f3848e16
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from django.urls import resolve, reverse
class TestEntryCreateUrl(TestCase):
    """Routing checks for the 'articles:create' URL."""

    def setUp(self):
        self.url_resolved = resolve('/articles/create/')

    def test_url_app_name(self):
        self.assertEqual(self.url_resolved.app_name, 'articles')

    def test_url_reverse(self):
        self.assertEqual(reverse('articles:create'), '/articles/create/')

    def test_url_func_name(self):
        self.assertEqual(self.url_resolved.func.__name__, 'ArticleCreateView')

    def test_url_name(self):
        self.assertEqual(self.url_resolved.url_name, 'create')

    def test_url_namespace(self):
        self.assertEqual(self.url_resolved.namespace, 'articles')

    def test_url_view_name(self):
        self.assertEqual(self.url_resolved.view_name, 'articles:create')
class TestArticleCreatedUrl(TestCase):
    """Routing checks for the 'articles:created' URL."""

    def setUp(self):
        self.url_resolved = resolve('/articles/created/')

    def test_url_app_name(self):
        self.assertEqual(self.url_resolved.app_name, 'articles')

    def test_url_reverse(self):
        self.assertEqual(reverse('articles:created'), '/articles/created/')

    def test_url_func_name(self):
        self.assertEqual(self.url_resolved.func.__name__, 'ArticleCreatedTemplateView')

    def test_url_name(self):
        self.assertEqual(self.url_resolved.url_name, 'created')

    def test_url_namespace(self):
        self.assertEqual(self.url_resolved.namespace, 'articles')

    def test_url_view_name(self):
        self.assertEqual(self.url_resolved.view_name, 'articles:created')
class TestArticlesDetailUrl(TestCase):
    """Routing checks for the 'articles:detail' URL (pk-parameterised)."""

    def setUp(self):
        self.url_resolved = resolve('/articles/1/')

    def test_url_app_name(self):
        self.assertEqual(self.url_resolved.app_name, 'articles')

    def test_url_reverse(self):
        self.assertEqual(reverse('articles:detail', kwargs={'pk': 1}), '/articles/1/')

    def test_url_func_name(self):
        self.assertEqual(self.url_resolved.func.__name__, 'ArticleDetailView')

    def test_url_name(self):
        self.assertEqual(self.url_resolved.url_name, 'detail')

    def test_url_namespace(self):
        self.assertEqual(self.url_resolved.namespace, 'articles')

    def test_url_view_name(self):
        self.assertEqual(self.url_resolved.view_name, 'articles:detail')

    def test_url_kwargs(self):
        self.assertEqual(self.url_resolved.kwargs, {'pk': 1})
class TestArticlesDeletelUrl(TestCase):
    """Routing checks for the 'articles:delete' URL (pk-parameterised).

    NOTE(review): class name contains a typo ("Deletel"); kept as-is since
    renaming would change the public identifier.
    """

    def setUp(self):
        self.url_resolved = resolve('/articles/1/delete/')

    def test_url_app_name(self):
        self.assertEqual(self.url_resolved.app_name, 'articles')

    def test_url_reverse(self):
        self.assertEqual(reverse('articles:delete', kwargs={'pk': 1}), '/articles/1/delete/')

    def test_url_func_name(self):
        self.assertEqual(self.url_resolved.func.__name__, 'ArticleDeleteView')

    def test_url_name(self):
        self.assertEqual(self.url_resolved.url_name, 'delete')

    def test_url_namespace(self):
        self.assertEqual(self.url_resolved.namespace, 'articles')

    def test_url_view_name(self):
        self.assertEqual(self.url_resolved.view_name, 'articles:delete')

    def test_url_kwargs(self):
        self.assertEqual(self.url_resolved.kwargs, {'pk': 1})
class TestArticleDeletedUrl(TestCase):
    """Routing checks for the 'articles:deleted' URL."""

    def setUp(self):
        self.url_resolved = resolve('/articles/deleted/')

    def test_url_app_name(self):
        self.assertEqual(self.url_resolved.app_name, 'articles')

    def test_url_reverse(self):
        self.assertEqual(reverse('articles:deleted'), '/articles/deleted/')

    def test_url_func_name(self):
        self.assertEqual(self.url_resolved.func.__name__, 'ArticleDeletedTemplateView')

    def test_url_name(self):
        self.assertEqual(self.url_resolved.url_name, 'deleted')

    def test_url_namespace(self):
        self.assertEqual(self.url_resolved.namespace, 'articles')

    def test_url_view_name(self):
        self.assertEqual(self.url_resolved.view_name, 'articles:deleted')
class TestArticlesUrl(TestCase):
    """Routing checks for the 'articles:list' URL."""

    def setUp(self):
        self.url_resolved = resolve('/articles/')

    def test_url_app_name(self):
        self.assertEqual(self.url_resolved.app_name, 'articles')

    def test_url_reverse(self):
        self.assertEqual(reverse('articles:list'), '/articles/')

    def test_url_func_name(self):
        self.assertEqual(self.url_resolved.func.__name__, 'ArticlesListView')

    def test_url_name(self):
        self.assertEqual(self.url_resolved.url_name, 'list')

    def test_url_namespace(self):
        self.assertEqual(self.url_resolved.namespace, 'articles')

    def test_url_view_name(self):
        self.assertEqual(self.url_resolved.view_name, 'articles:list')
class TestArticleUpdateUrl(TestCase):
    """Routing checks for the 'articles:update' URL (pk-parameterised)."""

    def setUp(self):
        self.url_resolved = resolve('/articles/1/update/')

    def test_url_app_name(self):
        self.assertEqual(self.url_resolved.app_name, 'articles')

    def test_url_reverse(self):
        self.assertEqual(reverse('articles:update', kwargs={'pk': 1}), '/articles/1/update/')

    def test_url_func_name(self):
        self.assertEqual(self.url_resolved.func.__name__, 'ArticleUpdateView')

    def test_url_name(self):
        self.assertEqual(self.url_resolved.url_name, 'update')

    def test_url_namespace(self):
        self.assertEqual(self.url_resolved.namespace, 'articles')

    def test_url_view_name(self):
        self.assertEqual(self.url_resolved.view_name, 'articles:update')

    def test_url_kwargs(self):
        self.assertEqual(self.url_resolved.kwargs, {'pk': 1})
| 34.675277
| 66
| 0.711078
| 1,177
| 9,397
| 5.254885
| 0.039932
| 0.066774
| 0.109135
| 0.122878
| 0.916087
| 0.901859
| 0.901859
| 0.887793
| 0.887793
| 0.823767
| 0
| 0.001608
| 0.20581
| 9,397
| 270
| 67
| 34.803704
| 0.827147
| 0
| 0
| 0.748768
| 0
| 0
| 0.078536
| 0.005534
| 0
| 0
| 0
| 0
| 0.221675
| 1
| 0.256158
| false
| 0
| 0.009852
| 0
| 0.300493
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e9020b5fc725dc66377bd6a7719ca6a28e05882c
| 356
|
py
|
Python
|
style_transfer/prepro/prepro.py
|
piyush-kgp/Style-Transfer
|
8b10dedeb357c304e15e278b9e29901abcaf92f7
|
[
"MIT"
] | null | null | null |
style_transfer/prepro/prepro.py
|
piyush-kgp/Style-Transfer
|
8b10dedeb357c304e15e278b9e29901abcaf92f7
|
[
"MIT"
] | 6
|
2019-12-16T21:58:08.000Z
|
2021-08-25T15:38:23.000Z
|
style_transfer/prepro/prepro.py
|
piyush01123/Style-Transfer
|
8b10dedeb357c304e15e278b9e29901abcaf92f7
|
[
"MIT"
] | 2
|
2019-08-28T13:49:43.000Z
|
2020-10-20T13:50:30.000Z
|
import cv2
def imread(fp):
    """Read the image at *fp* and return it as a numpy array.

    Implements the previously-TODO'd failure handling: ``cv2.imread``
    signals failure by returning ``None`` instead of raising, which lets
    bad paths propagate silently; surface that as an exception here.

    Raises:
        FileNotFoundError: if the file does not exist or cannot be decoded.
    """
    img = cv2.imread(fp)
    if img is None:
        raise FileNotFoundError("could not read image: {}".format(fp))
    return img
def imsave(fp, img):
    """Write numpy array *img* to the path *fp*.

    Implements the previously-TODO'd failure handling: ``cv2.imwrite``
    signals failure by returning ``False`` instead of raising.

    Raises:
        IOError: if the image could not be written.
    """
    if not cv2.imwrite(fp, img):
        raise IOError("could not write image: {}".format(fp))
| 20.941176
| 77
| 0.657303
| 47
| 356
| 4.978723
| 0.446809
| 0.068376
| 0.119658
| 0.205128
| 0.717949
| 0.717949
| 0.717949
| 0.717949
| 0.717949
| 0.717949
| 0
| 0.011278
| 0.252809
| 356
| 16
| 78
| 22.25
| 0.868421
| 0.567416
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0
| 0.8
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
e91fa658d309756d842c8c4210409f89afd24bad
| 3,621
|
py
|
Python
|
userbot/plugins/history.py
|
RiderFA/Dark_Userbot
|
480df539bfeae994d59649a54d2478ed24b445bb
|
[
"MIT"
] | null | null | null |
userbot/plugins/history.py
|
RiderFA/Dark_Userbot
|
480df539bfeae994d59649a54d2478ed24b445bb
|
[
"MIT"
] | null | null | null |
userbot/plugins/history.py
|
RiderFA/Dark_Userbot
|
480df539bfeae994d59649a54d2478ed24b445bb
|
[
"MIT"
] | null | null | null |
from telethon import events
from telethon.errors.rpcerrorlist import YouBlockedUserError
from telethon.tl.functions.account import UpdateNotifySettingsRequest
from mafiabot import bot, CmdHelp
from mafiabot.utils import admin_cmd, edit_or_reply as eor, sudo_cmd
@bot.on(admin_cmd(pattern="history ?(.*)"))
@bot.on(sudo_cmd(pattern="history ?(.*)", allow_sudo=True))
async def _(mafiaevent):
    """`.history` — fetch the name history of the replied-to user.

    Queries @Sangmatainfo_bot in a private conversation and forwards its
    second reply (the name-history message) back to the chat.

    Fix: the original decorators referenced ``mafiabot.on``, but this module
    only binds ``bot`` (``from mafiabot import bot, CmdHelp``) — ``mafiabot``
    itself was never imported as a name, so the decoration raised NameError
    at plugin load. Register on ``bot`` instead.
    """
    if mafiaevent.fwd_from:
        return
    if not mafiaevent.reply_to_msg_id:
        await eor(mafiaevent, "`Please Reply To A User To Get This Module Work`")
        return
    reply_message = await mafiaevent.get_reply_message()
    chat = "Sangmatainfo_bot"
    victim = reply_message.sender.id
    if reply_message.sender.bot:
        await eor(mafiaevent, "Need actual users. Not Bots")
        return
    await eor(mafiaevent, "Checking...")
    async with mafiaevent.client.conversation(chat) as conv:
        try:
            # The info bot answers with three messages; register all waiters
            # before sending the query so none are missed.
            response1 = conv.wait_event(events.NewMessage(incoming=True, from_users=461843263))
            response2 = conv.wait_event(events.NewMessage(incoming=True, from_users=461843263))
            response3 = conv.wait_event(events.NewMessage(incoming=True, from_users=461843263))
            await conv.send_message("/search_id {}".format(victim))
            response1 = await response1
            response2 = await response2
            # Awaited only to drain the conversation; not used by this command.
            response3 = await response3
        except YouBlockedUserError:
            await mafiaevent.reply("Please unblock ( @Sangmatainfo_bot ) ")
            return
    if response1.text.startswith("No records found"):
        await eor(mafiaevent, "User never changed his Username...")
    else:
        await mafiaevent.delete()
        await mafiaevent.client.send_message(mafiaevent.chat_id, response2.message)
@bot.on(admin_cmd(pattern="unh ?(.*)"))
@bot.on(sudo_cmd(pattern="unh ?(.*)", allow_sudo=True))
async def _(mafiaevent):
    """`.unh` — fetch the username history of the replied-to user.

    Same flow as `.history`, but forwards @Sangmatainfo_bot's third reply
    (the username-history message).

    Fix: the original decorators referenced ``mafiabot.on``, but this module
    only binds ``bot`` (``from mafiabot import bot, CmdHelp``) — ``mafiabot``
    itself was never imported as a name, so the decoration raised NameError
    at plugin load. Register on ``bot`` instead.
    """
    if mafiaevent.fwd_from:
        return
    if not mafiaevent.reply_to_msg_id:
        await eor(mafiaevent, "`Please Reply To A User To Get This Module Work`")
        return
    reply_message = await mafiaevent.get_reply_message()
    chat = "Sangmatainfo_bot"
    victim = reply_message.sender.id
    if reply_message.sender.bot:
        await eor(mafiaevent, "Need actual users. Not Bots")
        return
    await eor(mafiaevent, "Checking...")
    async with mafiaevent.client.conversation(chat) as conv:
        try:
            # The info bot answers with three messages; register all waiters
            # before sending the query so none are missed.
            response1 = conv.wait_event(events.NewMessage(incoming=True, from_users=461843263))
            response2 = conv.wait_event(events.NewMessage(incoming=True, from_users=461843263))
            response3 = conv.wait_event(events.NewMessage(incoming=True, from_users=461843263))
            await conv.send_message("/search_id {}".format(victim))
            response1 = await response1
            # Awaited only to drain the conversation; not used by this command.
            response2 = await response2
            response3 = await response3
        except YouBlockedUserError:
            await mafiaevent.reply("Please unblock ( @Sangmatainfo_bot ) ")
            return
    if response1.text.startswith("No records found"):
        await eor(mafiaevent, "User never changed his Username...")
    else:
        await mafiaevent.delete()
        await mafiaevent.client.send_message(mafiaevent.chat_id, response3.message)
# Register this plugin's commands with the help system (fluent CmdHelp API).
(
    CmdHelp("history")
    .add_command("history", "<reply to a user>", "Fetches the name history of replied user.")
    .add_command("unh", "<reply to user>", "Fetches the Username History of replied users.")
    .add()
)
| 46.423077
| 96
| 0.669152
| 415
| 3,621
| 5.703614
| 0.224096
| 0.027038
| 0.060837
| 0.048162
| 0.822983
| 0.78158
| 0.78158
| 0.78158
| 0.78158
| 0.78158
| 0
| 0.027348
| 0.232532
| 3,621
| 77
| 97
| 47.025974
| 0.824397
| 0
| 0
| 0.783784
| 0
| 0
| 0.161281
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.067568
| 0
| 0.175676
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3ae632c5667451f8a68d97a905d00a12293503cf
| 367,247
|
py
|
Python
|
thirdparty/mapd/MapD.py
|
mrocklin/pygdf
|
2de9407427da9497ebdf8951a12857be0fab31bb
|
[
"Apache-2.0"
] | 5
|
2019-01-15T12:31:49.000Z
|
2021-03-05T21:17:13.000Z
|
thirdparty/mapd/MapD.py
|
mrocklin/pygdf
|
2de9407427da9497ebdf8951a12857be0fab31bb
|
[
"Apache-2.0"
] | 19
|
2018-07-18T07:15:44.000Z
|
2021-02-22T17:00:18.000Z
|
thirdparty/mapd/MapD.py
|
mrocklin/pygdf
|
2de9407427da9497ebdf8951a12857be0fab31bb
|
[
"Apache-2.0"
] | 2
|
2020-05-01T09:54:34.000Z
|
2021-04-17T10:57:07.000Z
|
#
# Autogenerated by Thrift Compiler (1.0.0-dev)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
class Iface(object):
    """Abstract service interface for the MapD Thrift service.

    Autogenerated by the Thrift compiler (see file header: "DO NOT EDIT");
    every method is a stub that a concrete client/server implementation
    overrides. Method bodies and docstrings are left exactly as generated.
    """
    def connect(self, user, passwd, dbname):
        """
        Parameters:
         - user
         - passwd
         - dbname
        """
        pass

    def disconnect(self, session):
        """
        Parameters:
         - session
        """
        pass

    def get_server_status(self, session):
        """
        Parameters:
         - session
        """
        pass

    def get_tables(self, session):
        """
        Parameters:
         - session
        """
        pass

    def get_table_details(self, session, table_name):
        """
        Parameters:
         - session
         - table_name
        """
        pass

    def get_users(self, session):
        """
        Parameters:
         - session
        """
        pass

    def get_databases(self, session):
        """
        Parameters:
         - session
        """
        pass

    def get_version(self):
        pass

    def start_heap_profile(self, session):
        """
        Parameters:
         - session
        """
        pass

    def stop_heap_profile(self, session):
        """
        Parameters:
         - session
        """
        pass

    def get_heap_profile(self, session):
        """
        Parameters:
         - session
        """
        pass

    def get_memory_gpu(self, session):
        """
        Parameters:
         - session
        """
        pass

    def get_memory_cpu(self, session):
        """
        Parameters:
         - session
        """
        pass

    def get_memory_summary(self, session):
        """
        Parameters:
         - session
        """
        pass

    def clear_cpu_memory(self, session):
        """
        Parameters:
         - session
        """
        pass

    def clear_gpu_memory(self, session):
        """
        Parameters:
         - session
        """
        pass

    def sql_execute(self, session, query, column_format, nonce, first_n):
        """
        Parameters:
         - session
         - query
         - column_format
         - nonce
         - first_n
        """
        pass

    def sql_execute_df(self, session, query, device_type, device_id, first_n):
        """
        Parameters:
         - session
         - query
         - device_type
         - device_id
         - first_n
        """
        pass

    def sql_execute_gdf(self, session, query, device_id, first_n):
        """
        Parameters:
         - session
         - query
         - device_id
         - first_n
        """
        pass

    def interrupt(self, session):
        """
        Parameters:
         - session
        """
        pass

    def sql_validate(self, session, query):
        """
        Parameters:
         - session
         - query
        """
        pass

    def set_execution_mode(self, session, mode):
        """
        Parameters:
         - session
         - mode
        """
        pass

    def render_vega(self, session, widget_id, vega_json, compression_level, nonce):
        """
        Parameters:
         - session
         - widget_id
         - vega_json
         - compression_level
         - nonce
        """
        pass

    def get_result_row_for_pixel(self, session, widget_id, pixel, table_col_names, column_format, pixelRadius, nonce):
        """
        Parameters:
         - session
         - widget_id
         - pixel
         - table_col_names
         - column_format
         - pixelRadius
         - nonce
        """
        pass

    def get_frontend_view(self, session, view_name):
        """
        Parameters:
         - session
         - view_name
        """
        pass

    def get_frontend_views(self, session):
        """
        Parameters:
         - session
        """
        pass

    def create_frontend_view(self, session, view_name, view_state, image_hash, view_metadata):
        """
        Parameters:
         - session
         - view_name
         - view_state
         - image_hash
         - view_metadata
        """
        pass

    def delete_frontend_view(self, session, view_name):
        """
        Parameters:
         - session
         - view_name
        """
        pass

    def get_link_view(self, session, link):
        """
        Parameters:
         - session
         - link
        """
        pass

    def create_link(self, session, view_state, view_metadata):
        """
        Parameters:
         - session
         - view_state
         - view_metadata
        """
        pass

    def load_table_binary(self, session, table_name, rows):
        """
        Parameters:
         - session
         - table_name
         - rows
        """
        pass

    def load_table(self, session, table_name, rows):
        """
        Parameters:
         - session
         - table_name
         - rows
        """
        pass

    def detect_column_types(self, session, file_name, copy_params):
        """
        Parameters:
         - session
         - file_name
         - copy_params
        """
        pass

    def create_table(self, session, table_name, row_desc, table_type):
        """
        Parameters:
         - session
         - table_name
         - row_desc
         - table_type
        """
        pass

    def import_table(self, session, table_name, file_name, copy_params):
        """
        Parameters:
         - session
         - table_name
         - file_name
         - copy_params
        """
        pass

    def import_geo_table(self, session, table_name, file_name, copy_params, row_desc):
        """
        Parameters:
         - session
         - table_name
         - file_name
         - copy_params
         - row_desc
        """
        pass

    def import_table_status(self, session, import_id):
        """
        Parameters:
         - session
         - import_id
        """
        pass

    def start_query(self, session, query_ra, just_explain):
        """
        Parameters:
         - session
         - query_ra
         - just_explain
        """
        pass

    def execute_first_step(self, pending_query):
        """
        Parameters:
         - pending_query
        """
        pass

    def broadcast_serialized_rows(self, serialized_rows, row_desc, query_id):
        """
        Parameters:
         - serialized_rows
         - row_desc
         - query_id
        """
        pass

    def render_vega_raw_pixels(self, session, widget_id, node_idx, vega_json):
        """
        Parameters:
         - session
         - widget_id
         - node_idx
         - vega_json
        """
        pass

    def insert_data(self, session, insert_data):
        """
        Parameters:
         - session
         - insert_data
        """
        pass

    def get_table_descriptor(self, session, table_name):
        """
        Parameters:
         - session
         - table_name
        """
        pass

    def get_row_descriptor(self, session, table_name):
        """
        Parameters:
         - session
         - table_name
        """
        pass

    def render(self, session, query, render_type, nonce):
        """
        Parameters:
         - session
         - query
         - render_type
         - nonce
        """
        pass

    def get_rows_for_pixels(self, session, widget_id, pixels, table_name, col_names, column_format, nonce):
        """
        Parameters:
         - session
         - widget_id
         - pixels
         - table_name
         - col_names
         - column_format
         - nonce
        """
        pass

    def get_row_for_pixel(self, session, widget_id, pixel, table_name, col_names, column_format, pixelRadius, nonce):
        """
        Parameters:
         - session
         - widget_id
         - pixel
         - table_name
         - col_names
         - column_format
         - pixelRadius
         - nonce
        """
        pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def connect(self, user, passwd, dbname):
"""
Parameters:
- user
- passwd
- dbname
"""
self.send_connect(user, passwd, dbname)
return self.recv_connect()
def send_connect(self, user, passwd, dbname):
self._oprot.writeMessageBegin('connect', TMessageType.CALL, self._seqid)
args = connect_args()
args.user = user
args.passwd = passwd
args.dbname = dbname
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_connect(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = connect_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "connect failed: unknown result")
def disconnect(self, session):
"""
Parameters:
- session
"""
self.send_disconnect(session)
self.recv_disconnect()
def send_disconnect(self, session):
self._oprot.writeMessageBegin('disconnect', TMessageType.CALL, self._seqid)
args = disconnect_args()
args.session = session
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_disconnect(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = disconnect_result()
result.read(iprot)
iprot.readMessageEnd()
if result.e is not None:
raise result.e
return
def get_server_status(self, session):
"""
Parameters:
- session
"""
self.send_get_server_status(session)
return self.recv_get_server_status()
def send_get_server_status(self, session):
self._oprot.writeMessageBegin('get_server_status', TMessageType.CALL, self._seqid)
args = get_server_status_args()
args.session = session
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_server_status(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = get_server_status_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_server_status failed: unknown result")
def get_tables(self, session):
"""
Parameters:
- session
"""
self.send_get_tables(session)
return self.recv_get_tables()
def send_get_tables(self, session):
self._oprot.writeMessageBegin('get_tables', TMessageType.CALL, self._seqid)
args = get_tables_args()
args.session = session
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_tables(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = get_tables_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_tables failed: unknown result")
def get_table_details(self, session, table_name):
"""
Parameters:
- session
- table_name
"""
self.send_get_table_details(session, table_name)
return self.recv_get_table_details()
def send_get_table_details(self, session, table_name):
self._oprot.writeMessageBegin('get_table_details', TMessageType.CALL, self._seqid)
args = get_table_details_args()
args.session = session
args.table_name = table_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_table_details(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = get_table_details_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table_details failed: unknown result")
def get_users(self, session):
"""
Parameters:
- session
"""
self.send_get_users(session)
return self.recv_get_users()
def send_get_users(self, session):
self._oprot.writeMessageBegin('get_users', TMessageType.CALL, self._seqid)
args = get_users_args()
args.session = session
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_users(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = get_users_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_users failed: unknown result")
def get_databases(self, session):
    """Call the get_databases RPC and return its reply payload.

    Parameters:
     - session
    """
    self.send_get_databases(session)
    return self.recv_get_databases()

def send_get_databases(self, session):
    """Serialize the get_databases CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('get_databases', TMessageType.CALL, self._seqid)
    call_args = get_databases_args()
    call_args.session = session
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_get_databases(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = get_databases_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_databases failed: unknown result")
def get_version(self):
    """Call the get_version RPC (no arguments) and return the server version."""
    self.send_get_version()
    return self.recv_get_version()

def send_get_version(self):
    """Serialize the get_version CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('get_version', TMessageType.CALL, self._seqid)
    call_args = get_version_args()
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_get_version(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = get_version_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_version failed: unknown result")
def start_heap_profile(self, session):
    """Call the start_heap_profile RPC (void result).

    Parameters:
     - session
    """
    self.send_start_heap_profile(session)
    self.recv_start_heap_profile()

def send_start_heap_profile(self, session):
    """Serialize the start_heap_profile CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('start_heap_profile', TMessageType.CALL, self._seqid)
    call_args = start_heap_profile_args()
    call_args.session = session
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_start_heap_profile(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = start_heap_profile_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.e is not None:
        raise reply.e
    return
def stop_heap_profile(self, session):
    """Call the stop_heap_profile RPC (void result).

    Parameters:
     - session
    """
    self.send_stop_heap_profile(session)
    self.recv_stop_heap_profile()

def send_stop_heap_profile(self, session):
    """Serialize the stop_heap_profile CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('stop_heap_profile', TMessageType.CALL, self._seqid)
    call_args = stop_heap_profile_args()
    call_args.session = session
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_stop_heap_profile(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = stop_heap_profile_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.e is not None:
        raise reply.e
    return
def get_heap_profile(self, session):
    """Call the get_heap_profile RPC and return its reply payload.

    Parameters:
     - session
    """
    self.send_get_heap_profile(session)
    return self.recv_get_heap_profile()

def send_get_heap_profile(self, session):
    """Serialize the get_heap_profile CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('get_heap_profile', TMessageType.CALL, self._seqid)
    call_args = get_heap_profile_args()
    call_args.session = session
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_get_heap_profile(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = get_heap_profile_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_heap_profile failed: unknown result")
def get_memory_gpu(self, session):
    """Call the get_memory_gpu RPC and return its reply payload.

    Parameters:
     - session
    """
    self.send_get_memory_gpu(session)
    return self.recv_get_memory_gpu()

def send_get_memory_gpu(self, session):
    """Serialize the get_memory_gpu CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('get_memory_gpu', TMessageType.CALL, self._seqid)
    call_args = get_memory_gpu_args()
    call_args.session = session
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_get_memory_gpu(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = get_memory_gpu_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_memory_gpu failed: unknown result")
def get_memory_cpu(self, session):
    """Call the get_memory_cpu RPC and return its reply payload.

    Parameters:
     - session
    """
    self.send_get_memory_cpu(session)
    return self.recv_get_memory_cpu()

def send_get_memory_cpu(self, session):
    """Serialize the get_memory_cpu CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('get_memory_cpu', TMessageType.CALL, self._seqid)
    call_args = get_memory_cpu_args()
    call_args.session = session
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_get_memory_cpu(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = get_memory_cpu_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_memory_cpu failed: unknown result")
def get_memory_summary(self, session):
    """Call the get_memory_summary RPC and return its reply payload.

    Parameters:
     - session
    """
    self.send_get_memory_summary(session)
    return self.recv_get_memory_summary()

def send_get_memory_summary(self, session):
    """Serialize the get_memory_summary CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('get_memory_summary', TMessageType.CALL, self._seqid)
    call_args = get_memory_summary_args()
    call_args.session = session
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_get_memory_summary(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = get_memory_summary_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_memory_summary failed: unknown result")
def clear_cpu_memory(self, session):
    """Call the clear_cpu_memory RPC (void result).

    Parameters:
     - session
    """
    self.send_clear_cpu_memory(session)
    self.recv_clear_cpu_memory()

def send_clear_cpu_memory(self, session):
    """Serialize the clear_cpu_memory CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('clear_cpu_memory', TMessageType.CALL, self._seqid)
    call_args = clear_cpu_memory_args()
    call_args.session = session
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_clear_cpu_memory(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = clear_cpu_memory_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.e is not None:
        raise reply.e
    return
def clear_gpu_memory(self, session):
    """Call the clear_gpu_memory RPC (void result).

    Parameters:
     - session
    """
    self.send_clear_gpu_memory(session)
    self.recv_clear_gpu_memory()

def send_clear_gpu_memory(self, session):
    """Serialize the clear_gpu_memory CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('clear_gpu_memory', TMessageType.CALL, self._seqid)
    call_args = clear_gpu_memory_args()
    call_args.session = session
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_clear_gpu_memory(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = clear_gpu_memory_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.e is not None:
        raise reply.e
    return
def sql_execute(self, session, query, column_format, nonce, first_n):
    """Call the sql_execute RPC and return its reply payload.

    Parameters:
     - session
     - query
     - column_format
     - nonce
     - first_n
    """
    self.send_sql_execute(session, query, column_format, nonce, first_n)
    return self.recv_sql_execute()

def send_sql_execute(self, session, query, column_format, nonce, first_n):
    """Serialize the sql_execute CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('sql_execute', TMessageType.CALL, self._seqid)
    call_args = sql_execute_args()
    call_args.session = session
    call_args.query = query
    call_args.column_format = column_format
    call_args.nonce = nonce
    call_args.first_n = first_n
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_sql_execute(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = sql_execute_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "sql_execute failed: unknown result")
def sql_execute_df(self, session, query, device_type, device_id, first_n):
    """Call the sql_execute_df RPC and return its reply payload.

    Parameters:
     - session
     - query
     - device_type
     - device_id
     - first_n
    """
    self.send_sql_execute_df(session, query, device_type, device_id, first_n)
    return self.recv_sql_execute_df()

def send_sql_execute_df(self, session, query, device_type, device_id, first_n):
    """Serialize the sql_execute_df CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('sql_execute_df', TMessageType.CALL, self._seqid)
    call_args = sql_execute_df_args()
    call_args.session = session
    call_args.query = query
    call_args.device_type = device_type
    call_args.device_id = device_id
    call_args.first_n = first_n
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_sql_execute_df(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = sql_execute_df_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "sql_execute_df failed: unknown result")
def sql_execute_gdf(self, session, query, device_id, first_n):
    """Call the sql_execute_gdf RPC and return its reply payload.

    Parameters:
     - session
     - query
     - device_id
     - first_n
    """
    self.send_sql_execute_gdf(session, query, device_id, first_n)
    return self.recv_sql_execute_gdf()

def send_sql_execute_gdf(self, session, query, device_id, first_n):
    """Serialize the sql_execute_gdf CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('sql_execute_gdf', TMessageType.CALL, self._seqid)
    call_args = sql_execute_gdf_args()
    call_args.session = session
    call_args.query = query
    call_args.device_id = device_id
    call_args.first_n = first_n
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_sql_execute_gdf(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = sql_execute_gdf_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "sql_execute_gdf failed: unknown result")
def interrupt(self, session):
    """Call the interrupt RPC (void result).

    Parameters:
     - session
    """
    self.send_interrupt(session)
    self.recv_interrupt()

def send_interrupt(self, session):
    """Serialize the interrupt CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('interrupt', TMessageType.CALL, self._seqid)
    call_args = interrupt_args()
    call_args.session = session
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_interrupt(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = interrupt_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.e is not None:
        raise reply.e
    return
def sql_validate(self, session, query):
    """Call the sql_validate RPC and return its reply payload.

    Parameters:
     - session
     - query
    """
    self.send_sql_validate(session, query)
    return self.recv_sql_validate()

def send_sql_validate(self, session, query):
    """Serialize the sql_validate CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('sql_validate', TMessageType.CALL, self._seqid)
    call_args = sql_validate_args()
    call_args.session = session
    call_args.query = query
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_sql_validate(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = sql_validate_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "sql_validate failed: unknown result")
def set_execution_mode(self, session, mode):
    """Call the set_execution_mode RPC (void result).

    Parameters:
     - session
     - mode
    """
    self.send_set_execution_mode(session, mode)
    self.recv_set_execution_mode()

def send_set_execution_mode(self, session, mode):
    """Serialize the set_execution_mode CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('set_execution_mode', TMessageType.CALL, self._seqid)
    call_args = set_execution_mode_args()
    call_args.session = session
    call_args.mode = mode
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_set_execution_mode(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = set_execution_mode_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.e is not None:
        raise reply.e
    return
def render_vega(self, session, widget_id, vega_json, compression_level, nonce):
    """Call the render_vega RPC and return its reply payload.

    Parameters:
     - session
     - widget_id
     - vega_json
     - compression_level
     - nonce
    """
    self.send_render_vega(session, widget_id, vega_json, compression_level, nonce)
    return self.recv_render_vega()

def send_render_vega(self, session, widget_id, vega_json, compression_level, nonce):
    """Serialize the render_vega CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('render_vega', TMessageType.CALL, self._seqid)
    call_args = render_vega_args()
    call_args.session = session
    call_args.widget_id = widget_id
    call_args.vega_json = vega_json
    call_args.compression_level = compression_level
    call_args.nonce = nonce
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_render_vega(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = render_vega_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "render_vega failed: unknown result")
def get_result_row_for_pixel(self, session, widget_id, pixel, table_col_names, column_format, pixelRadius, nonce):
    """Call the get_result_row_for_pixel RPC and return its reply payload.

    Parameters:
     - session
     - widget_id
     - pixel
     - table_col_names
     - column_format
     - pixelRadius
     - nonce
    """
    self.send_get_result_row_for_pixel(session, widget_id, pixel, table_col_names, column_format, pixelRadius, nonce)
    return self.recv_get_result_row_for_pixel()

def send_get_result_row_for_pixel(self, session, widget_id, pixel, table_col_names, column_format, pixelRadius, nonce):
    """Serialize the get_result_row_for_pixel CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('get_result_row_for_pixel', TMessageType.CALL, self._seqid)
    call_args = get_result_row_for_pixel_args()
    call_args.session = session
    call_args.widget_id = widget_id
    call_args.pixel = pixel
    call_args.table_col_names = table_col_names
    call_args.column_format = column_format
    call_args.pixelRadius = pixelRadius
    call_args.nonce = nonce
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_get_result_row_for_pixel(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = get_result_row_for_pixel_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_result_row_for_pixel failed: unknown result")
def get_frontend_view(self, session, view_name):
    """Call the get_frontend_view RPC and return its reply payload.

    Parameters:
     - session
     - view_name
    """
    self.send_get_frontend_view(session, view_name)
    return self.recv_get_frontend_view()

def send_get_frontend_view(self, session, view_name):
    """Serialize the get_frontend_view CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('get_frontend_view', TMessageType.CALL, self._seqid)
    call_args = get_frontend_view_args()
    call_args.session = session
    call_args.view_name = view_name
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_get_frontend_view(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = get_frontend_view_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_frontend_view failed: unknown result")
def get_frontend_views(self, session):
    """Call the get_frontend_views RPC and return its reply payload.

    Parameters:
     - session
    """
    self.send_get_frontend_views(session)
    return self.recv_get_frontend_views()

def send_get_frontend_views(self, session):
    """Serialize the get_frontend_views CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('get_frontend_views', TMessageType.CALL, self._seqid)
    call_args = get_frontend_views_args()
    call_args.session = session
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_get_frontend_views(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = get_frontend_views_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_frontend_views failed: unknown result")
def create_frontend_view(self, session, view_name, view_state, image_hash, view_metadata):
    """Call the create_frontend_view RPC (void result).

    Parameters:
     - session
     - view_name
     - view_state
     - image_hash
     - view_metadata
    """
    self.send_create_frontend_view(session, view_name, view_state, image_hash, view_metadata)
    self.recv_create_frontend_view()

def send_create_frontend_view(self, session, view_name, view_state, image_hash, view_metadata):
    """Serialize the create_frontend_view CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('create_frontend_view', TMessageType.CALL, self._seqid)
    call_args = create_frontend_view_args()
    call_args.session = session
    call_args.view_name = view_name
    call_args.view_state = view_state
    call_args.image_hash = image_hash
    call_args.view_metadata = view_metadata
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_create_frontend_view(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = create_frontend_view_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.e is not None:
        raise reply.e
    return
def delete_frontend_view(self, session, view_name):
    """Call the delete_frontend_view RPC (void result).

    Parameters:
     - session
     - view_name
    """
    self.send_delete_frontend_view(session, view_name)
    self.recv_delete_frontend_view()

def send_delete_frontend_view(self, session, view_name):
    """Serialize the delete_frontend_view CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('delete_frontend_view', TMessageType.CALL, self._seqid)
    call_args = delete_frontend_view_args()
    call_args.session = session
    call_args.view_name = view_name
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_delete_frontend_view(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = delete_frontend_view_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.e is not None:
        raise reply.e
    return
def get_link_view(self, session, link):
    """Call the get_link_view RPC and return its reply payload.

    Parameters:
     - session
     - link
    """
    self.send_get_link_view(session, link)
    return self.recv_get_link_view()

def send_get_link_view(self, session, link):
    """Serialize the get_link_view CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('get_link_view', TMessageType.CALL, self._seqid)
    call_args = get_link_view_args()
    call_args.session = session
    call_args.link = link
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_get_link_view(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = get_link_view_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_link_view failed: unknown result")
def create_link(self, session, view_state, view_metadata):
    """Call the create_link RPC and return its reply payload.

    Parameters:
     - session
     - view_state
     - view_metadata
    """
    self.send_create_link(session, view_state, view_metadata)
    return self.recv_create_link()

def send_create_link(self, session, view_state, view_metadata):
    """Serialize the create_link CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('create_link', TMessageType.CALL, self._seqid)
    call_args = create_link_args()
    call_args.session = session
    call_args.view_state = view_state
    call_args.view_metadata = view_metadata
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_create_link(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = create_link_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "create_link failed: unknown result")
def load_table_binary(self, session, table_name, rows):
    """Call the load_table_binary RPC (void result).

    Parameters:
     - session
     - table_name
     - rows
    """
    self.send_load_table_binary(session, table_name, rows)
    self.recv_load_table_binary()

def send_load_table_binary(self, session, table_name, rows):
    """Serialize the load_table_binary CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('load_table_binary', TMessageType.CALL, self._seqid)
    call_args = load_table_binary_args()
    call_args.session = session
    call_args.table_name = table_name
    call_args.rows = rows
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_load_table_binary(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = load_table_binary_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.e is not None:
        raise reply.e
    return
def load_table(self, session, table_name, rows):
    """Call the load_table RPC (void result).

    Parameters:
     - session
     - table_name
     - rows
    """
    self.send_load_table(session, table_name, rows)
    self.recv_load_table()

def send_load_table(self, session, table_name, rows):
    """Serialize the load_table CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('load_table', TMessageType.CALL, self._seqid)
    call_args = load_table_args()
    call_args.session = session
    call_args.table_name = table_name
    call_args.rows = rows
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_load_table(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = load_table_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.e is not None:
        raise reply.e
    return
def detect_column_types(self, session, file_name, copy_params):
    """Call the detect_column_types RPC and return its reply payload.

    Parameters:
     - session
     - file_name
     - copy_params
    """
    self.send_detect_column_types(session, file_name, copy_params)
    return self.recv_detect_column_types()

def send_detect_column_types(self, session, file_name, copy_params):
    """Serialize the detect_column_types CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('detect_column_types', TMessageType.CALL, self._seqid)
    call_args = detect_column_types_args()
    call_args.session = session
    call_args.file_name = file_name
    call_args.copy_params = copy_params
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_detect_column_types(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = detect_column_types_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "detect_column_types failed: unknown result")
def create_table(self, session, table_name, row_desc, table_type):
    """Call the create_table RPC (void result).

    Parameters:
     - session
     - table_name
     - row_desc
     - table_type
    """
    self.send_create_table(session, table_name, row_desc, table_type)
    self.recv_create_table()

def send_create_table(self, session, table_name, row_desc, table_type):
    """Serialize the create_table CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('create_table', TMessageType.CALL, self._seqid)
    call_args = create_table_args()
    call_args.session = session
    call_args.table_name = table_name
    call_args.row_desc = row_desc
    call_args.table_type = table_type
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_create_table(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = create_table_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.e is not None:
        raise reply.e
    return
def import_table(self, session, table_name, file_name, copy_params):
    """Call the import_table RPC (void result).

    Parameters:
     - session
     - table_name
     - file_name
     - copy_params
    """
    self.send_import_table(session, table_name, file_name, copy_params)
    self.recv_import_table()

def send_import_table(self, session, table_name, file_name, copy_params):
    """Serialize the import_table CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('import_table', TMessageType.CALL, self._seqid)
    call_args = import_table_args()
    call_args.session = session
    call_args.table_name = table_name
    call_args.file_name = file_name
    call_args.copy_params = copy_params
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_import_table(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = import_table_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.e is not None:
        raise reply.e
    return
def import_geo_table(self, session, table_name, file_name, copy_params, row_desc):
    """Call the import_geo_table RPC (void result).

    Parameters:
     - session
     - table_name
     - file_name
     - copy_params
     - row_desc
    """
    self.send_import_geo_table(session, table_name, file_name, copy_params, row_desc)
    self.recv_import_geo_table()

def send_import_geo_table(self, session, table_name, file_name, copy_params, row_desc):
    """Serialize the import_geo_table CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('import_geo_table', TMessageType.CALL, self._seqid)
    call_args = import_geo_table_args()
    call_args.session = session
    call_args.table_name = table_name
    call_args.file_name = file_name
    call_args.copy_params = copy_params
    call_args.row_desc = row_desc
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_import_geo_table(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = import_geo_table_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.e is not None:
        raise reply.e
    return
def import_table_status(self, session, import_id):
    """Call the import_table_status RPC and return its reply payload.

    Parameters:
     - session
     - import_id
    """
    self.send_import_table_status(session, import_id)
    return self.recv_import_table_status()

def send_import_table_status(self, session, import_id):
    """Serialize the import_table_status CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('import_table_status', TMessageType.CALL, self._seqid)
    call_args = import_table_status_args()
    call_args.session = session
    call_args.import_id = import_id
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_import_table_status(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = import_table_status_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "import_table_status failed: unknown result")
def start_query(self, session, query_ra, just_explain):
    """Call the start_query RPC and return its reply payload.

    Parameters:
     - session
     - query_ra
     - just_explain
    """
    self.send_start_query(session, query_ra, just_explain)
    return self.recv_start_query()

def send_start_query(self, session, query_ra, just_explain):
    """Serialize the start_query CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('start_query', TMessageType.CALL, self._seqid)
    call_args = start_query_args()
    call_args.session = session
    call_args.query_ra = query_ra
    call_args.just_explain = just_explain
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_start_query(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = start_query_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "start_query failed: unknown result")
def execute_first_step(self, pending_query):
    """Call the execute_first_step RPC and return its reply payload.

    Parameters:
     - pending_query
    """
    self.send_execute_first_step(pending_query)
    return self.recv_execute_first_step()

def send_execute_first_step(self, pending_query):
    """Serialize the execute_first_step CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('execute_first_step', TMessageType.CALL, self._seqid)
    call_args = execute_first_step_args()
    call_args.pending_query = pending_query
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_execute_first_step(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = execute_first_step_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.e is not None:
        raise reply.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "execute_first_step failed: unknown result")
def broadcast_serialized_rows(self, serialized_rows, row_desc, query_id):
    """Call the broadcast_serialized_rows RPC (void result).

    Parameters:
     - serialized_rows
     - row_desc
     - query_id
    """
    self.send_broadcast_serialized_rows(serialized_rows, row_desc, query_id)
    self.recv_broadcast_serialized_rows()

def send_broadcast_serialized_rows(self, serialized_rows, row_desc, query_id):
    """Serialize the broadcast_serialized_rows CALL frame and flush the transport."""
    proto = self._oprot
    proto.writeMessageBegin('broadcast_serialized_rows', TMessageType.CALL, self._seqid)
    call_args = broadcast_serialized_rows_args()
    call_args.serialized_rows = serialized_rows
    call_args.row_desc = row_desc
    call_args.query_id = query_id
    call_args.write(proto)
    proto.writeMessageEnd()
    proto.trans.flush()

def recv_broadcast_serialized_rows(self):
    """Read one reply frame; raise transport faults or declared exceptions."""
    proto = self._iprot
    _, msg_type, _ = proto.readMessageBegin()
    if msg_type == TMessageType.EXCEPTION:
        fault = TApplicationException()
        fault.read(proto)
        proto.readMessageEnd()
        raise fault
    reply = broadcast_serialized_rows_result()
    reply.read(proto)
    proto.readMessageEnd()
    if reply.e is not None:
        raise reply.e
    return
def render_vega_raw_pixels(self, session, widget_id, node_idx, vega_json):
"""
Parameters:
- session
- widget_id
- node_idx
- vega_json
"""
self.send_render_vega_raw_pixels(session, widget_id, node_idx, vega_json)
return self.recv_render_vega_raw_pixels()
    def send_render_vega_raw_pixels(self, session, widget_id, node_idx, vega_json):
        """Serialize a render_vega_raw_pixels CALL message and flush it."""
        self._oprot.writeMessageBegin('render_vega_raw_pixels', TMessageType.CALL, self._seqid)
        args = render_vega_raw_pixels_args()
        args.session = session
        args.widget_id = widget_id
        args.node_idx = node_idx
        args.vega_json = vega_json
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_render_vega_raw_pixels(self):
        """Read the render_vega_raw_pixels reply; return result.success or raise the reported error."""
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = render_vega_raw_pixels_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.e is not None:
            raise result.e
        raise TApplicationException(TApplicationException.MISSING_RESULT, "render_vega_raw_pixels failed: unknown result")
    def insert_data(self, session, insert_data):
        """Invoke the insert_data RPC and block until the (void) reply arrives.

        Parameters:
         - session
         - insert_data
        """
        self.send_insert_data(session, insert_data)
        self.recv_insert_data()
    def send_insert_data(self, session, insert_data):
        """Serialize an insert_data CALL message and flush it."""
        self._oprot.writeMessageBegin('insert_data', TMessageType.CALL, self._seqid)
        args = insert_data_args()
        args.session = session
        args.insert_data = insert_data
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_insert_data(self):
        """Read the (void) insert_data reply; raise any reported exception."""
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = insert_data_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.e is not None:
            raise result.e
        return
    def get_table_descriptor(self, session, table_name):
        """Invoke the get_table_descriptor RPC and return its result.

        Parameters:
         - session
         - table_name
        """
        self.send_get_table_descriptor(session, table_name)
        return self.recv_get_table_descriptor()
    def send_get_table_descriptor(self, session, table_name):
        """Serialize a get_table_descriptor CALL message and flush it."""
        self._oprot.writeMessageBegin('get_table_descriptor', TMessageType.CALL, self._seqid)
        args = get_table_descriptor_args()
        args.session = session
        args.table_name = table_name
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_table_descriptor(self):
        """Read the get_table_descriptor reply; return result.success or raise the reported error."""
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = get_table_descriptor_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.e is not None:
            raise result.e
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table_descriptor failed: unknown result")
    def get_row_descriptor(self, session, table_name):
        """Invoke the get_row_descriptor RPC and return its result.

        Parameters:
         - session
         - table_name
        """
        self.send_get_row_descriptor(session, table_name)
        return self.recv_get_row_descriptor()
    def send_get_row_descriptor(self, session, table_name):
        """Serialize a get_row_descriptor CALL message and flush it."""
        self._oprot.writeMessageBegin('get_row_descriptor', TMessageType.CALL, self._seqid)
        args = get_row_descriptor_args()
        args.session = session
        args.table_name = table_name
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_row_descriptor(self):
        """Read the get_row_descriptor reply; return result.success or raise the reported error."""
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = get_row_descriptor_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.e is not None:
            raise result.e
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get_row_descriptor failed: unknown result")
    def render(self, session, query, render_type, nonce):
        """Invoke the render RPC and return its result.

        Parameters:
         - session
         - query
         - render_type
         - nonce
        """
        self.send_render(session, query, render_type, nonce)
        return self.recv_render()
    def send_render(self, session, query, render_type, nonce):
        """Serialize a render CALL message and flush it."""
        self._oprot.writeMessageBegin('render', TMessageType.CALL, self._seqid)
        args = render_args()
        args.session = session
        args.query = query
        args.render_type = render_type
        args.nonce = nonce
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_render(self):
        """Read the render reply; return result.success or raise the reported error."""
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = render_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.e is not None:
            raise result.e
        raise TApplicationException(TApplicationException.MISSING_RESULT, "render failed: unknown result")
    def get_rows_for_pixels(self, session, widget_id, pixels, table_name, col_names, column_format, nonce):
        """Invoke the get_rows_for_pixels RPC and return its result.

        Parameters:
         - session
         - widget_id
         - pixels
         - table_name
         - col_names
         - column_format
         - nonce
        """
        self.send_get_rows_for_pixels(session, widget_id, pixels, table_name, col_names, column_format, nonce)
        return self.recv_get_rows_for_pixels()
    def send_get_rows_for_pixels(self, session, widget_id, pixels, table_name, col_names, column_format, nonce):
        """Serialize a get_rows_for_pixels CALL message and flush it."""
        self._oprot.writeMessageBegin('get_rows_for_pixels', TMessageType.CALL, self._seqid)
        args = get_rows_for_pixels_args()
        args.session = session
        args.widget_id = widget_id
        args.pixels = pixels
        args.table_name = table_name
        args.col_names = col_names
        args.column_format = column_format
        args.nonce = nonce
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_rows_for_pixels(self):
        """Read the get_rows_for_pixels reply; return result.success or raise the reported error."""
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = get_rows_for_pixels_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.e is not None:
            raise result.e
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get_rows_for_pixels failed: unknown result")
    def get_row_for_pixel(self, session, widget_id, pixel, table_name, col_names, column_format, pixelRadius, nonce):
        """Invoke the get_row_for_pixel RPC and return its result.

        Parameters:
         - session
         - widget_id
         - pixel
         - table_name
         - col_names
         - column_format
         - pixelRadius
         - nonce
        """
        self.send_get_row_for_pixel(session, widget_id, pixel, table_name, col_names, column_format, pixelRadius, nonce)
        return self.recv_get_row_for_pixel()
    def send_get_row_for_pixel(self, session, widget_id, pixel, table_name, col_names, column_format, pixelRadius, nonce):
        """Serialize a get_row_for_pixel CALL message and flush it."""
        self._oprot.writeMessageBegin('get_row_for_pixel', TMessageType.CALL, self._seqid)
        args = get_row_for_pixel_args()
        args.session = session
        args.widget_id = widget_id
        args.pixel = pixel
        args.table_name = table_name
        args.col_names = col_names
        args.column_format = column_format
        args.pixelRadius = pixelRadius
        args.nonce = nonce
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()
    def recv_get_row_for_pixel(self):
        """Read the get_row_for_pixel reply; return result.success or raise the reported error."""
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = get_row_for_pixel_result()
        result.read(iprot)
        iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        if result.e is not None:
            raise result.e
        raise TApplicationException(TApplicationException.MISSING_RESULT, "get_row_for_pixel failed: unknown result")
class Processor(Iface, TProcessor):
    def __init__(self, handler):
        """Wrap `handler` (the service implementation) and build the RPC-name -> dispatch-method table."""
        self._handler = handler
        # Maps the wire-level method name to the bound process_* dispatcher.
        self._processMap = {}
        self._processMap["connect"] = Processor.process_connect
        self._processMap["disconnect"] = Processor.process_disconnect
        self._processMap["get_server_status"] = Processor.process_get_server_status
        self._processMap["get_tables"] = Processor.process_get_tables
        self._processMap["get_table_details"] = Processor.process_get_table_details
        self._processMap["get_users"] = Processor.process_get_users
        self._processMap["get_databases"] = Processor.process_get_databases
        self._processMap["get_version"] = Processor.process_get_version
        self._processMap["start_heap_profile"] = Processor.process_start_heap_profile
        self._processMap["stop_heap_profile"] = Processor.process_stop_heap_profile
        self._processMap["get_heap_profile"] = Processor.process_get_heap_profile
        self._processMap["get_memory_gpu"] = Processor.process_get_memory_gpu
        self._processMap["get_memory_cpu"] = Processor.process_get_memory_cpu
        self._processMap["get_memory_summary"] = Processor.process_get_memory_summary
        self._processMap["clear_cpu_memory"] = Processor.process_clear_cpu_memory
        self._processMap["clear_gpu_memory"] = Processor.process_clear_gpu_memory
        self._processMap["sql_execute"] = Processor.process_sql_execute
        self._processMap["sql_execute_df"] = Processor.process_sql_execute_df
        self._processMap["sql_execute_gdf"] = Processor.process_sql_execute_gdf
        self._processMap["interrupt"] = Processor.process_interrupt
        self._processMap["sql_validate"] = Processor.process_sql_validate
        self._processMap["set_execution_mode"] = Processor.process_set_execution_mode
        self._processMap["render_vega"] = Processor.process_render_vega
        self._processMap["get_result_row_for_pixel"] = Processor.process_get_result_row_for_pixel
        self._processMap["get_frontend_view"] = Processor.process_get_frontend_view
        self._processMap["get_frontend_views"] = Processor.process_get_frontend_views
        self._processMap["create_frontend_view"] = Processor.process_create_frontend_view
        self._processMap["delete_frontend_view"] = Processor.process_delete_frontend_view
        self._processMap["get_link_view"] = Processor.process_get_link_view
        self._processMap["create_link"] = Processor.process_create_link
        self._processMap["load_table_binary"] = Processor.process_load_table_binary
        self._processMap["load_table"] = Processor.process_load_table
        self._processMap["detect_column_types"] = Processor.process_detect_column_types
        self._processMap["create_table"] = Processor.process_create_table
        self._processMap["import_table"] = Processor.process_import_table
        self._processMap["import_geo_table"] = Processor.process_import_geo_table
        self._processMap["import_table_status"] = Processor.process_import_table_status
        self._processMap["start_query"] = Processor.process_start_query
        self._processMap["execute_first_step"] = Processor.process_execute_first_step
        self._processMap["broadcast_serialized_rows"] = Processor.process_broadcast_serialized_rows
        self._processMap["render_vega_raw_pixels"] = Processor.process_render_vega_raw_pixels
        self._processMap["insert_data"] = Processor.process_insert_data
        self._processMap["get_table_descriptor"] = Processor.process_get_table_descriptor
        self._processMap["get_row_descriptor"] = Processor.process_get_row_descriptor
        self._processMap["render"] = Processor.process_render
        self._processMap["get_rows_for_pixels"] = Processor.process_get_rows_for_pixels
        self._processMap["get_row_for_pixel"] = Processor.process_get_row_for_pixel
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
    def process_connect(self, seqid, iprot, oprot):
        """Serve one connect RPC: decode args, call the handler, write REPLY or error.

        A TMapDException from the handler travels back as a REPLY payload;
        any other Exception is logged and answered with a generic
        TApplicationException. Transport errors and interrupts propagate.
        """
        args = connect_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = connect_result()
        try:
            result.success = self._handler.connect(args.user, args.passwd, args.dbname)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("connect", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_disconnect(self, seqid, iprot, oprot):
        """Serve one disconnect RPC: decode args, call the handler, write REPLY or error."""
        args = disconnect_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = disconnect_result()
        try:
            self._handler.disconnect(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("disconnect", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_server_status(self, seqid, iprot, oprot):
        """Serve one get_server_status RPC: decode args, call the handler, write REPLY or error."""
        args = get_server_status_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_server_status_result()
        try:
            result.success = self._handler.get_server_status(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("get_server_status", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_tables(self, seqid, iprot, oprot):
        """Serve one get_tables RPC: decode args, call the handler, write REPLY or error."""
        args = get_tables_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_tables_result()
        try:
            result.success = self._handler.get_tables(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("get_tables", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_table_details(self, seqid, iprot, oprot):
        """Serve one get_table_details RPC: decode args, call the handler, write REPLY or error."""
        args = get_table_details_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_table_details_result()
        try:
            result.success = self._handler.get_table_details(args.session, args.table_name)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("get_table_details", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_users(self, seqid, iprot, oprot):
        """Serve one get_users RPC: decode args, call the handler, write REPLY or error."""
        args = get_users_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_users_result()
        try:
            result.success = self._handler.get_users(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("get_users", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_databases(self, seqid, iprot, oprot):
        """Serve one get_databases RPC: decode args, call the handler, write REPLY or error."""
        args = get_databases_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_databases_result()
        try:
            result.success = self._handler.get_databases(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("get_databases", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_version(self, seqid, iprot, oprot):
        """Serve one get_version RPC (no args beyond the envelope): call the handler, write REPLY or error."""
        args = get_version_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_version_result()
        try:
            result.success = self._handler.get_version()
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("get_version", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_start_heap_profile(self, seqid, iprot, oprot):
        """Serve one start_heap_profile RPC (void result): decode args, call the handler, write REPLY or error."""
        args = start_heap_profile_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = start_heap_profile_result()
        try:
            self._handler.start_heap_profile(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("start_heap_profile", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_stop_heap_profile(self, seqid, iprot, oprot):
        """Serve one stop_heap_profile RPC (void result): decode args, call the handler, write REPLY or error."""
        args = stop_heap_profile_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = stop_heap_profile_result()
        try:
            self._handler.stop_heap_profile(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("stop_heap_profile", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_heap_profile(self, seqid, iprot, oprot):
        """Serve one get_heap_profile RPC: decode args, call the handler, write REPLY or error."""
        args = get_heap_profile_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_heap_profile_result()
        try:
            result.success = self._handler.get_heap_profile(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("get_heap_profile", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_memory_gpu(self, seqid, iprot, oprot):
        """Serve one get_memory_gpu RPC: decode args, call the handler, write REPLY or error."""
        args = get_memory_gpu_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_memory_gpu_result()
        try:
            result.success = self._handler.get_memory_gpu(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("get_memory_gpu", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_memory_cpu(self, seqid, iprot, oprot):
        """Serve one get_memory_cpu RPC: decode args, call the handler, write REPLY or error."""
        args = get_memory_cpu_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_memory_cpu_result()
        try:
            result.success = self._handler.get_memory_cpu(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("get_memory_cpu", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_memory_summary(self, seqid, iprot, oprot):
        """Serve one get_memory_summary RPC: decode args, call the handler, write REPLY or error."""
        args = get_memory_summary_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_memory_summary_result()
        try:
            result.success = self._handler.get_memory_summary(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("get_memory_summary", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_clear_cpu_memory(self, seqid, iprot, oprot):
        """Serve one clear_cpu_memory RPC (void result): decode args, call the handler, write REPLY or error."""
        args = clear_cpu_memory_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = clear_cpu_memory_result()
        try:
            self._handler.clear_cpu_memory(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("clear_cpu_memory", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_clear_gpu_memory(self, seqid, iprot, oprot):
        """Serve one clear_gpu_memory RPC (void result): decode args, call the handler, write REPLY or error."""
        args = clear_gpu_memory_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = clear_gpu_memory_result()
        try:
            self._handler.clear_gpu_memory(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("clear_gpu_memory", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_sql_execute(self, seqid, iprot, oprot):
        """Serve one sql_execute RPC: decode args, call the handler, write REPLY or error."""
        args = sql_execute_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = sql_execute_result()
        try:
            result.success = self._handler.sql_execute(args.session, args.query, args.column_format, args.nonce, args.first_n)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("sql_execute", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_sql_execute_df(self, seqid, iprot, oprot):
        """Serve one sql_execute_df RPC: decode args, call the handler, write REPLY or error."""
        args = sql_execute_df_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = sql_execute_df_result()
        try:
            result.success = self._handler.sql_execute_df(args.session, args.query, args.device_type, args.device_id, args.first_n)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("sql_execute_df", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_sql_execute_gdf(self, seqid, iprot, oprot):
        """Serve one sql_execute_gdf RPC: decode args, call the handler, write REPLY or error."""
        args = sql_execute_gdf_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = sql_execute_gdf_result()
        try:
            result.success = self._handler.sql_execute_gdf(args.session, args.query, args.device_id, args.first_n)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("sql_execute_gdf", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_interrupt(self, seqid, iprot, oprot):
        """Serve one interrupt RPC (void result): decode args, call the handler, write REPLY or error."""
        args = interrupt_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = interrupt_result()
        try:
            self._handler.interrupt(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("interrupt", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_sql_validate(self, seqid, iprot, oprot):
        """Serve one sql_validate RPC: decode args, call the handler, write REPLY or error."""
        args = sql_validate_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = sql_validate_result()
        try:
            result.success = self._handler.sql_validate(args.session, args.query)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("sql_validate", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_set_execution_mode(self, seqid, iprot, oprot):
        """Serve one set_execution_mode RPC (void result): decode args, call the handler, write REPLY or error."""
        args = set_execution_mode_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = set_execution_mode_result()
        try:
            self._handler.set_execution_mode(args.session, args.mode)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("set_execution_mode", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_render_vega(self, seqid, iprot, oprot):
        """Serve one render_vega RPC: decode args, call the handler, write REPLY or error."""
        args = render_vega_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = render_vega_result()
        try:
            result.success = self._handler.render_vega(args.session, args.widget_id, args.vega_json, args.compression_level, args.nonce)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("render_vega", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_result_row_for_pixel(self, seqid, iprot, oprot):
        """Serve one get_result_row_for_pixel RPC: decode args, call the handler, write REPLY or error."""
        args = get_result_row_for_pixel_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_result_row_for_pixel_result()
        try:
            result.success = self._handler.get_result_row_for_pixel(args.session, args.widget_id, args.pixel, args.table_col_names, args.column_format, args.pixelRadius, args.nonce)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("get_result_row_for_pixel", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_frontend_view(self, seqid, iprot, oprot):
        """Serve one get_frontend_view RPC: decode args, call the handler, write REPLY or error."""
        args = get_frontend_view_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_frontend_view_result()
        try:
            result.success = self._handler.get_frontend_view(args.session, args.view_name)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("get_frontend_view", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_frontend_views(self, seqid, iprot, oprot):
        """Serve one get_frontend_views RPC: decode args, call the handler, write REPLY or error."""
        args = get_frontend_views_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_frontend_views_result()
        try:
            result.success = self._handler.get_frontend_views(args.session)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("get_frontend_views", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_create_frontend_view(self, seqid, iprot, oprot):
        """Serve one create_frontend_view RPC (void result): decode args, call the handler, write REPLY or error."""
        args = create_frontend_view_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = create_frontend_view_result()
        try:
            self._handler.create_frontend_view(args.session, args.view_name, args.view_state, args.image_hash, args.view_metadata)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("create_frontend_view", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_delete_frontend_view(self, seqid, iprot, oprot):
        """Serve one delete_frontend_view RPC (void result): decode args, call the handler, write REPLY or error."""
        args = delete_frontend_view_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = delete_frontend_view_result()
        try:
            self._handler.delete_frontend_view(args.session, args.view_name)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("delete_frontend_view", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
    def process_get_link_view(self, seqid, iprot, oprot):
        """Serve one get_link_view RPC: decode args, call the handler, write REPLY or error."""
        args = get_link_view_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = get_link_view_result()
        try:
            result.success = self._handler.get_link_view(args.session, args.link)
            msg_type = TMessageType.REPLY
        except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
            raise
        except TMapDException as e:
            msg_type = TMessageType.REPLY
            result.e = e
        except Exception as ex:
            msg_type = TMessageType.EXCEPTION
            logging.exception(ex)
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("get_link_view", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
def process_create_link(self, seqid, iprot, oprot):
    """Decode a create_link call, dispatch to the handler, and write the reply."""
    request = create_link_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = create_link_result()
    try:
        reply.success = self._handler.create_link(request.session, request.view_state, request.view_metadata)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("create_link", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_load_table_binary(self, seqid, iprot, oprot):
    """Decode a load_table_binary call, dispatch to the handler, and write the reply."""
    request = load_table_binary_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = load_table_binary_result()
    try:
        self._handler.load_table_binary(request.session, request.table_name, request.rows)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("load_table_binary", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_load_table(self, seqid, iprot, oprot):
    """Decode a load_table call, dispatch to the handler, and write the reply."""
    request = load_table_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = load_table_result()
    try:
        self._handler.load_table(request.session, request.table_name, request.rows)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("load_table", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_detect_column_types(self, seqid, iprot, oprot):
    """Decode a detect_column_types call, dispatch to the handler, and write the reply."""
    request = detect_column_types_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = detect_column_types_result()
    try:
        reply.success = self._handler.detect_column_types(request.session, request.file_name, request.copy_params)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("detect_column_types", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_create_table(self, seqid, iprot, oprot):
    """Decode a create_table call, dispatch to the handler, and write the reply."""
    request = create_table_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = create_table_result()
    try:
        self._handler.create_table(request.session, request.table_name, request.row_desc, request.table_type)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("create_table", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_import_table(self, seqid, iprot, oprot):
    """Decode an import_table call, dispatch to the handler, and write the reply."""
    request = import_table_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = import_table_result()
    try:
        self._handler.import_table(request.session, request.table_name, request.file_name, request.copy_params)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("import_table", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_import_geo_table(self, seqid, iprot, oprot):
    """Decode an import_geo_table call, dispatch to the handler, and write the reply."""
    request = import_geo_table_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = import_geo_table_result()
    try:
        self._handler.import_geo_table(request.session, request.table_name, request.file_name, request.copy_params, request.row_desc)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("import_geo_table", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_import_table_status(self, seqid, iprot, oprot):
    """Decode an import_table_status call, dispatch to the handler, and write the reply."""
    request = import_table_status_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = import_table_status_result()
    try:
        reply.success = self._handler.import_table_status(request.session, request.import_id)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("import_table_status", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_start_query(self, seqid, iprot, oprot):
    """Decode a start_query call, dispatch to the handler, and write the reply."""
    request = start_query_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = start_query_result()
    try:
        reply.success = self._handler.start_query(request.session, request.query_ra, request.just_explain)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("start_query", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_execute_first_step(self, seqid, iprot, oprot):
    """Decode an execute_first_step call, dispatch to the handler, and write the reply."""
    request = execute_first_step_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = execute_first_step_result()
    try:
        reply.success = self._handler.execute_first_step(request.pending_query)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("execute_first_step", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_broadcast_serialized_rows(self, seqid, iprot, oprot):
    """Decode a broadcast_serialized_rows call, dispatch to the handler, and write the reply."""
    request = broadcast_serialized_rows_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = broadcast_serialized_rows_result()
    try:
        self._handler.broadcast_serialized_rows(request.serialized_rows, request.row_desc, request.query_id)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("broadcast_serialized_rows", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_render_vega_raw_pixels(self, seqid, iprot, oprot):
    """Decode a render_vega_raw_pixels call, dispatch to the handler, and write the reply."""
    request = render_vega_raw_pixels_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = render_vega_raw_pixels_result()
    try:
        reply.success = self._handler.render_vega_raw_pixels(request.session, request.widget_id, request.node_idx, request.vega_json)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("render_vega_raw_pixels", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_insert_data(self, seqid, iprot, oprot):
    """Decode an insert_data call, dispatch to the handler, and write the reply."""
    request = insert_data_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = insert_data_result()
    try:
        self._handler.insert_data(request.session, request.insert_data)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("insert_data", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_table_descriptor(self, seqid, iprot, oprot):
    """Decode a get_table_descriptor call, dispatch to the handler, and write the reply."""
    request = get_table_descriptor_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = get_table_descriptor_result()
    try:
        reply.success = self._handler.get_table_descriptor(request.session, request.table_name)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("get_table_descriptor", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_row_descriptor(self, seqid, iprot, oprot):
    """Decode a get_row_descriptor call, dispatch to the handler, and write the reply."""
    request = get_row_descriptor_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = get_row_descriptor_result()
    try:
        reply.success = self._handler.get_row_descriptor(request.session, request.table_name)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("get_row_descriptor", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_render(self, seqid, iprot, oprot):
    """Decode a render call, dispatch to the handler, and write the reply."""
    request = render_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = render_result()
    try:
        reply.success = self._handler.render(request.session, request.query, request.render_type, request.nonce)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("render", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_rows_for_pixels(self, seqid, iprot, oprot):
    """Decode a get_rows_for_pixels call, dispatch to the handler, and write the reply."""
    request = get_rows_for_pixels_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = get_rows_for_pixels_result()
    try:
        reply.success = self._handler.get_rows_for_pixels(request.session, request.widget_id, request.pixels, request.table_name, request.col_names, request.column_format, request.nonce)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("get_rows_for_pixels", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_row_for_pixel(self, seqid, iprot, oprot):
    """Decode a get_row_for_pixel call, dispatch to the handler, and write the reply."""
    request = get_row_for_pixel_args()
    request.read(iprot)
    iprot.readMessageEnd()
    reply = get_row_for_pixel_result()
    try:
        reply.success = self._handler.get_row_for_pixel(request.session, request.widget_id, request.pixel, request.table_name, request.col_names, request.column_format, request.pixelRadius, request.nonce)
    except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
        # Transport failures and shutdown requests propagate to the server loop.
        raise
    except TMapDException as exc:
        # Declared service exception: serialized back to the client as a normal reply.
        reply.e = exc
        reply_type = TMessageType.REPLY
    except Exception as exc:
        # Unexpected failure: log it and report a generic internal error.
        logging.exception(exc)
        reply = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        reply_type = TMessageType.EXCEPTION
    else:
        reply_type = TMessageType.REPLY
    oprot.writeMessageBegin("get_row_for_pixel", reply_type, seqid)
    reply.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class connect_args(object):
    """Wire struct carrying the arguments of the connect RPC.

    Attributes:
     - user
     - passwd
     - dbname
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'user', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'passwd', 'UTF8', None, ),  # 2
        (3, TType.STRING, 'dbname', 'UTF8', None, ),  # 3
    )

    def __init__(self, user=None, passwd=None, dbname=None,):
        self.user = user
        self.passwd = passwd
        self.dbname = dbname

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path: delegate to the accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            elif fid == 2 and ftype == TType.STRING:
                self.passwd = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            elif fid == 3 and ftype == TType.STRING:
                self.dbname = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            else:
                # Unknown or mistyped field: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        py2 = sys.version_info[0] == 2
        oprot.writeStructBegin('connect_args')
        if self.user is not None:
            oprot.writeFieldBegin('user', TType.STRING, 1)
            oprot.writeString(self.user.encode('utf-8') if py2 else self.user)
            oprot.writeFieldEnd()
        if self.passwd is not None:
            oprot.writeFieldBegin('passwd', TType.STRING, 2)
            oprot.writeString(self.passwd.encode('utf-8') if py2 else self.passwd)
            oprot.writeFieldEnd()
        if self.dbname is not None:
            oprot.writeFieldBegin('dbname', TType.STRING, 3)
            oprot.writeString(self.dbname.encode('utf-8') if py2 else self.dbname)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class connect_result(object):
    """Wire struct carrying the result of the connect RPC.

    Attributes:
     - success
     - e
    """

    thrift_spec = (
        (0, TType.STRING, 'success', 'UTF8', None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path: delegate to the accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.STRING:
                self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            elif fid == 1 and ftype == TType.STRUCT:
                self.e = TMapDException()
                self.e.read(iprot)
            else:
                # Unknown or mistyped field: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('connect_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class disconnect_args(object):
    """Wire struct carrying the arguments of the disconnect RPC.

    Attributes:
     - session
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
    )

    def __init__(self, session=None,):
        self.session = session

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path: delegate to the accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            else:
                # Unknown or mistyped field: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('disconnect_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class disconnect_result(object):
    """Wire struct carrying the result of the disconnect RPC.

    Attributes:
     - e
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path: delegate to the accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.e = TMapDException()
                self.e.read(iprot)
            else:
                # Unknown or mistyped field: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('disconnect_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class get_server_status_args(object):
    """Wire struct carrying the arguments of the get_server_status RPC.

    Attributes:
     - session
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
    )

    def __init__(self, session=None,):
        self.session = session

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path: delegate to the accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            else:
                # Unknown or mistyped field: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_server_status_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class get_server_status_result(object):
    """Wire struct carrying the result of the get_server_status RPC.

    Attributes:
     - success
     - e
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TServerStatus, TServerStatus.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path: delegate to the accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.STRUCT:
                self.success = TServerStatus()
                self.success.read(iprot)
            elif fid == 1 and ftype == TType.STRUCT:
                self.e = TMapDException()
                self.e.read(iprot)
            else:
                # Unknown or mistyped field: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_server_status_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class get_tables_args(object):
    """Wire struct carrying the arguments of the get_tables RPC.

    Attributes:
     - session
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
    )

    def __init__(self, session=None,):
        self.session = session

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path: delegate to the accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            else:
                # Unknown or mistyped field: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_tables_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class get_tables_result(object):
    """Wire struct carrying the result of the get_tables RPC.

    Attributes:
     - success
     - e
    """

    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRING, 'UTF8', False), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path: delegate to the accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.LIST:
                self.success = []
                (_elem_type, _count) = iprot.readListBegin()
                for _ in range(_count):
                    self.success.append(iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString())
                iprot.readListEnd()
            elif fid == 1 and ftype == TType.STRUCT:
                self.e = TMapDException()
                self.e.read(iprot)
            else:
                # Unknown or mistyped field: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        py2 = sys.version_info[0] == 2
        oprot.writeStructBegin('get_tables_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRING, len(self.success))
            for item in self.success:
                oprot.writeString(item.encode('utf-8') if py2 else item)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class get_table_details_args(object):
    """Wire struct carrying the arguments of the get_table_details RPC.

    Attributes:
     - session
     - table_name
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'table_name', 'UTF8', None, ),  # 2
    )

    def __init__(self, session=None, table_name=None,):
        self.session = session
        self.table_name = table_name

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path: delegate to the accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            elif fid == 2 and ftype == TType.STRING:
                self.table_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            else:
                # Unknown or mistyped field: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        py2 = sys.version_info[0] == 2
        oprot.writeStructBegin('get_table_details_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if py2 else self.session)
            oprot.writeFieldEnd()
        if self.table_name is not None:
            oprot.writeFieldBegin('table_name', TType.STRING, 2)
            oprot.writeString(self.table_name.encode('utf-8') if py2 else self.table_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class get_table_details_result(object):
    """Wire struct carrying the result of the get_table_details RPC.

    Attributes:
     - success
     - e
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TTableDetails, TTableDetails.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path: delegate to the accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.STRUCT:
                self.success = TTableDetails()
                self.success.read(iprot)
            elif fid == 1 and ftype == TType.STRUCT:
                self.e = TMapDException()
                self.e.read(iprot)
            else:
                # Unknown or mistyped field: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_table_details_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class get_users_args(object):
    """Wire struct carrying the arguments of the get_users RPC.

    Attributes:
     - session
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
    )

    def __init__(self, session=None,):
        self.session = session

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path: delegate to the accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            else:
                # Unknown or mistyped field: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        # Fast path: delegate to the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_users_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)
class get_users_result(object):
    """Thrift result struct for the ``get_users`` RPC.

    Attributes:
     - success
     - e
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRING, 'UTF8', False), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    # Read list header, then each string element in order.
                    self.success = []
                    (_etype166, _size163) = iprot.readListBegin()
                    for _i167 in range(_size163):
                        _elem168 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.success.append(_elem168)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_users_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRING, len(self.success))
            for iter169 in self.success:
                oprot.writeString(iter169.encode('utf-8') if sys.version_info[0] == 2 else iter169)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_databases_args(object):
    """Thrift argument struct for the ``get_databases`` RPC.

    Attributes:
     - session
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
    )

    def __init__(self, session=None,):
        self.session = session

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_databases_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_databases_result(object):
    """Thrift result struct for the ``get_databases`` RPC.

    Attributes:
     - success
     - e
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRUCT, (TDBInfo, TDBInfo.thrift_spec), False), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    # Read list header, then each TDBInfo element in order.
                    self.success = []
                    (_etype173, _size170) = iprot.readListBegin()
                    for _i174 in range(_size170):
                        _elem175 = TDBInfo()
                        _elem175.read(iprot)
                        self.success.append(_elem175)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_databases_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for iter176 in self.success:
                iter176.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_version_args(object):
    """Thrift argument struct for the ``get_version`` RPC (takes no arguments)."""

    # Empty field spec: this call carries no fields on the wire.
    thrift_spec = (
    )

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No declared fields: skip anything encountered.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_version_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_version_result(object):
    """Thrift result struct for the ``get_version`` RPC.

    Attributes:
     - success
     - e
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        (0, TType.STRING, 'success', 'UTF8', None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_version_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class start_heap_profile_args(object):
    """Thrift argument struct for the ``start_heap_profile`` RPC.

    Attributes:
     - session
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
    )

    def __init__(self, session=None,):
        self.session = session

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('start_heap_profile_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class start_heap_profile_result(object):
    """Thrift result struct for the ``start_heap_profile`` RPC (void return).

    Attributes:
     - e
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('start_heap_profile_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class stop_heap_profile_args(object):
    """Thrift argument struct for the ``stop_heap_profile`` RPC.

    Attributes:
     - session
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
    )

    def __init__(self, session=None,):
        self.session = session

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('stop_heap_profile_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class stop_heap_profile_result(object):
    """Thrift result struct for the ``stop_heap_profile`` RPC (void return).

    Attributes:
     - e
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('stop_heap_profile_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_heap_profile_args(object):
    """Thrift argument struct for the ``get_heap_profile`` RPC.

    Attributes:
     - session
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
    )

    def __init__(self, session=None,):
        self.session = session

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_heap_profile_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_heap_profile_result(object):
    """Thrift result struct for the ``get_heap_profile`` RPC.

    Attributes:
     - success
     - e
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        (0, TType.STRING, 'success', 'UTF8', None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_heap_profile_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_memory_gpu_args(object):
    """Thrift argument struct for the ``get_memory_gpu`` RPC.

    Attributes:
     - session
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
    )

    def __init__(self, session=None,):
        self.session = session

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_memory_gpu_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_memory_gpu_result(object):
    """Thrift result struct for the ``get_memory_gpu`` RPC.

    Attributes:
     - success
     - e
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        (0, TType.STRING, 'success', 'UTF8', None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_memory_gpu_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_memory_cpu_args(object):
    """Thrift argument struct for the ``get_memory_cpu`` RPC.

    Attributes:
     - session
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
    )

    def __init__(self, session=None,):
        self.session = session

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_memory_cpu_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_memory_cpu_result(object):
    """Thrift result struct for the ``get_memory_cpu`` RPC.

    Attributes:
     - success
     - e
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        (0, TType.STRING, 'success', 'UTF8', None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_memory_cpu_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_memory_summary_args(object):
    """Thrift argument struct for the ``get_memory_summary`` RPC.

    Attributes:
     - session
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
    )

    def __init__(self, session=None,):
        self.session = session

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_memory_summary_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_memory_summary_result(object):
    """Thrift result struct for the ``get_memory_summary`` RPC.

    Attributes:
     - success
     - e
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        (0, TType.STRUCT, 'success', (TMemorySummary, TMemorySummary.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TMemorySummary()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_memory_summary_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class clear_cpu_memory_args(object):
    """Thrift argument struct for the ``clear_cpu_memory`` RPC.

    Attributes:
     - session
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
    )

    def __init__(self, session=None,):
        self.session = session

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes; decode to unicode there.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('clear_cpu_memory_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class clear_cpu_memory_result(object):
    """Thrift result struct for the ``clear_cpu_memory`` RPC (void return).

    Attributes:
     - e
    """

    # Field layout consumed by the accelerated C serializer fast path.
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol ``iprot``."""
        # Fast path: delegate to the C decoder when the transport allows it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip its payload.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol ``oprot``."""
        # Fast path: use the accelerated C encoder when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('clear_cpu_memory_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class clear_gpu_memory_args(object):
    """Thrift argument struct for the ``clear_gpu_memory`` RPC.

    Attributes:
     - session: session string (TType.STRING, field id 1).
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
    )

    def __init__(self, session=None,):
        self.session = session

    def read(self, iprot):
        """Populate this struct from the input protocol *iprot*."""
        # Prefer the C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                # Python 2 returns bytes from readString(); decode to unicode there.
                self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('clear_gpu_memory_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Generated no-op: no required-field constraints."""
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class clear_gpu_memory_result(object):
    """Thrift result struct for the ``clear_gpu_memory`` RPC.

    Attributes:
     - e: TMapDException raised by the server, set when the call failed.
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Populate this struct from the input protocol *iprot*."""
        # Prefer the C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.e = TMapDException()
                self.e.read(iprot)
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('clear_gpu_memory_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Generated no-op: no required-field constraints."""
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class sql_execute_args(object):
    """Thrift argument struct for the ``sql_execute`` RPC.

    Attributes:
     - session: session string (STRING, field 1)
     - query: SQL text to execute (STRING, field 2)
     - column_format: BOOL, field 3 -- presumably selects column- vs row-wise results; confirm against the service IDL
     - nonce: caller-supplied string tag (STRING, field 4)
     - first_n: I32, field 5; defaults to -1 per thrift_spec (presumably "no limit" -- confirm)
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'session', 'UTF8', None, ), # 1
        (2, TType.STRING, 'query', 'UTF8', None, ), # 2
        (3, TType.BOOL, 'column_format', None, None, ), # 3
        (4, TType.STRING, 'nonce', 'UTF8', None, ), # 4
        (5, TType.I32, 'first_n', None, -1, ), # 5
    )
    def __init__(self, session=None, query=None, column_format=None, nonce=None, first_n=thrift_spec[5][4],):
        self.session = session
        self.query = query
        self.column_format = column_format
        self.nonce = nonce
        self.first_n = first_n
    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder driven by thrift_spec.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes from readString(); decode to unicode there.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.query = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.BOOL:
                    self.column_format = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.nonce = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.I32:
                    self.first_n = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        # Fast path: C-accelerated encoder driven by thrift_spec.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('sql_execute_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.query is not None:
            oprot.writeFieldBegin('query', TType.STRING, 2)
            oprot.writeString(self.query.encode('utf-8') if sys.version_info[0] == 2 else self.query)
            oprot.writeFieldEnd()
        if self.column_format is not None:
            oprot.writeFieldBegin('column_format', TType.BOOL, 3)
            oprot.writeBool(self.column_format)
            oprot.writeFieldEnd()
        if self.nonce is not None:
            oprot.writeFieldBegin('nonce', TType.STRING, 4)
            oprot.writeString(self.nonce.encode('utf-8') if sys.version_info[0] == 2 else self.nonce)
            oprot.writeFieldEnd()
        if self.first_n is not None:
            oprot.writeFieldBegin('first_n', TType.I32, 5)
            oprot.writeI32(self.first_n)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated no-op: no required-field constraints."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class sql_execute_result(object):
    """Thrift result struct for the ``sql_execute`` RPC.

    Attributes:
     - success: TQueryResult payload on success (field 0)
     - e: TMapDException raised by the server, if any (field 1)
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        (0, TType.STRUCT, 'success', (TQueryResult, TQueryResult.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ), # 1
    )
    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e
    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder driven by thrift_spec.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TQueryResult()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        # Fast path: C-accelerated encoder driven by thrift_spec.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('sql_execute_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated no-op: no required-field constraints."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class sql_execute_df_args(object):
    """Thrift argument struct for the ``sql_execute_df`` RPC.

    Attributes:
     - session: session string (STRING, field 1)
     - query: SQL text (STRING, field 2)
     - device_type: I32, field 3 -- presumably a device-type enum value; confirm against the IDL
     - device_id: I32, field 4; defaults to 0 per thrift_spec
     - first_n: I32, field 5; defaults to -1 per thrift_spec (presumably "no limit" -- confirm)
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'session', 'UTF8', None, ), # 1
        (2, TType.STRING, 'query', 'UTF8', None, ), # 2
        (3, TType.I32, 'device_type', None, None, ), # 3
        (4, TType.I32, 'device_id', None, 0, ), # 4
        (5, TType.I32, 'first_n', None, -1, ), # 5
    )
    def __init__(self, session=None, query=None, device_type=None, device_id=thrift_spec[4][4], first_n=thrift_spec[5][4],):
        self.session = session
        self.query = query
        self.device_type = device_type
        self.device_id = device_id
        self.first_n = first_n
    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder driven by thrift_spec.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes from readString(); decode to unicode there.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.query = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.device_type = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.device_id = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.I32:
                    self.first_n = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        # Fast path: C-accelerated encoder driven by thrift_spec.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('sql_execute_df_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.query is not None:
            oprot.writeFieldBegin('query', TType.STRING, 2)
            oprot.writeString(self.query.encode('utf-8') if sys.version_info[0] == 2 else self.query)
            oprot.writeFieldEnd()
        if self.device_type is not None:
            oprot.writeFieldBegin('device_type', TType.I32, 3)
            oprot.writeI32(self.device_type)
            oprot.writeFieldEnd()
        if self.device_id is not None:
            oprot.writeFieldBegin('device_id', TType.I32, 4)
            oprot.writeI32(self.device_id)
            oprot.writeFieldEnd()
        if self.first_n is not None:
            oprot.writeFieldBegin('first_n', TType.I32, 5)
            oprot.writeI32(self.first_n)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated no-op: no required-field constraints."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class sql_execute_df_result(object):
    """Thrift result struct for the ``sql_execute_df`` RPC.

    Attributes:
     - success: TDataFrame payload on success (field 0)
     - e: TMapDException raised by the server, if any (field 1)
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        (0, TType.STRUCT, 'success', (TDataFrame, TDataFrame.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ), # 1
    )
    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e
    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder driven by thrift_spec.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TDataFrame()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        # Fast path: C-accelerated encoder driven by thrift_spec.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('sql_execute_df_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated no-op: no required-field constraints."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class sql_execute_gdf_args(object):
    """Thrift argument struct for the ``sql_execute_gdf`` RPC.

    Attributes:
     - session: session string (STRING, field 1)
     - query: SQL text (STRING, field 2)
     - device_id: I32, field 3; defaults to 0 per thrift_spec
     - first_n: I32, field 4; defaults to -1 per thrift_spec (presumably "no limit" -- confirm)
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'session', 'UTF8', None, ), # 1
        (2, TType.STRING, 'query', 'UTF8', None, ), # 2
        (3, TType.I32, 'device_id', None, 0, ), # 3
        (4, TType.I32, 'first_n', None, -1, ), # 4
    )
    def __init__(self, session=None, query=None, device_id=thrift_spec[3][4], first_n=thrift_spec[4][4],):
        self.session = session
        self.query = query
        self.device_id = device_id
        self.first_n = first_n
    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder driven by thrift_spec.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes from readString(); decode to unicode there.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.query = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.device_id = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.first_n = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        # Fast path: C-accelerated encoder driven by thrift_spec.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('sql_execute_gdf_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.query is not None:
            oprot.writeFieldBegin('query', TType.STRING, 2)
            oprot.writeString(self.query.encode('utf-8') if sys.version_info[0] == 2 else self.query)
            oprot.writeFieldEnd()
        if self.device_id is not None:
            oprot.writeFieldBegin('device_id', TType.I32, 3)
            oprot.writeI32(self.device_id)
            oprot.writeFieldEnd()
        if self.first_n is not None:
            oprot.writeFieldBegin('first_n', TType.I32, 4)
            oprot.writeI32(self.first_n)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated no-op: no required-field constraints."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class sql_execute_gdf_result(object):
    """Thrift result struct for the ``sql_execute_gdf`` RPC.

    Attributes:
     - success: TDataFrame payload on success (field 0)
     - e: TMapDException raised by the server, if any (field 1)
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        (0, TType.STRUCT, 'success', (TDataFrame, TDataFrame.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ), # 1
    )
    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e
    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder driven by thrift_spec.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TDataFrame()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        # Fast path: C-accelerated encoder driven by thrift_spec.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('sql_execute_gdf_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated no-op: no required-field constraints."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class interrupt_args(object):
    """Thrift argument struct for the ``interrupt`` RPC.

    Attributes:
     - session: session string (TType.STRING, field id 1).
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
    )

    def __init__(self, session=None,):
        self.session = session

    def read(self, iprot):
        """Populate this struct from the input protocol *iprot*."""
        # Prefer the C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                # Python 2 returns bytes from readString(); decode to unicode there.
                self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('interrupt_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Generated no-op: no required-field constraints."""
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class interrupt_result(object):
    """Thrift result struct for the ``interrupt`` RPC.

    Attributes:
     - e: TMapDException raised by the server, set when the call failed.
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Populate this struct from the input protocol *iprot*."""
        # Prefer the C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.e = TMapDException()
                self.e.read(iprot)
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('interrupt_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Generated no-op: no required-field constraints."""
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class sql_validate_args(object):
    """Thrift argument struct for the ``sql_validate`` RPC.

    Attributes:
     - session: session string (TType.STRING, field id 1).
     - query: SQL text to validate (TType.STRING, field id 2).
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'query', 'UTF8', None, ),  # 2
    )

    def __init__(self, session=None, query=None,):
        self.session = session
        self.query = query

    def read(self, iprot):
        """Populate this struct from the input protocol *iprot*."""
        # Prefer the C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                # Python 2 returns bytes from readString(); decode to unicode there.
                self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            elif field_id == 2 and field_type == TType.STRING:
                self.query = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('sql_validate_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.query is not None:
            oprot.writeFieldBegin('query', TType.STRING, 2)
            oprot.writeString(self.query.encode('utf-8') if sys.version_info[0] == 2 else self.query)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Generated no-op: no required-field constraints."""
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class sql_validate_result(object):
    """Thrift result struct for the ``sql_validate`` RPC.

    Attributes:
     - success: dict mapping string keys to TColumnType values (field 0) --
       presumably column name -> column type; confirm against the service IDL
     - e: TMapDException raised by the server, if any (field 1)
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        (0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.STRUCT, (TColumnType, TColumnType.thrift_spec), False), None, ), # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ), # 1
    )
    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e
    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder driven by thrift_spec.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.MAP:
                    # Rebuild the map entry by entry; generated temp names are intentional.
                    self.success = {}
                    (_ktype178, _vtype179, _size177) = iprot.readMapBegin()
                    for _i181 in range(_size177):
                        _key182 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val183 = TColumnType()
                        _val183.read(iprot)
                        self.success[_key182] = _val183
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        # Fast path: C-accelerated encoder driven by thrift_spec.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('sql_validate_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.MAP, 0)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.success))
            for kiter184, viter185 in self.success.items():
                oprot.writeString(kiter184.encode('utf-8') if sys.version_info[0] == 2 else kiter184)
                viter185.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated no-op: no required-field constraints."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class set_execution_mode_args(object):
    """Thrift argument struct for the ``set_execution_mode`` RPC.

    Attributes:
     - session: session string (TType.STRING, field id 1).
     - mode: I32 mode code (field id 2) -- presumably an execution-mode enum; confirm against the IDL.
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.I32, 'mode', None, None, ),  # 2
    )

    def __init__(self, session=None, mode=None,):
        self.session = session
        self.mode = mode

    def read(self, iprot):
        """Populate this struct from the input protocol *iprot*."""
        # Prefer the C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                # Python 2 returns bytes from readString(); decode to unicode there.
                self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
            elif field_id == 2 and field_type == TType.I32:
                self.mode = iprot.readI32()
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('set_execution_mode_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.mode is not None:
            oprot.writeFieldBegin('mode', TType.I32, 2)
            oprot.writeI32(self.mode)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Generated no-op: no required-field constraints."""
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class set_execution_mode_result(object):
    """Thrift result struct for the ``set_execution_mode`` RPC.

    Attributes:
     - e: TMapDException raised by the server, set when the call failed.
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Populate this struct from the input protocol *iprot*."""
        # Prefer the C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (_, field_type, field_id) = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRUCT:
                self.e = TMapDException()
                self.e.read(iprot)
            else:
                # Unknown or mistyped field: consume and ignore it.
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('set_execution_mode_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Generated no-op: no required-field constraints."""
        return

    def __repr__(self):
        fields = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, fields)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class render_vega_args(object):
    """Thrift argument struct for the ``render_vega`` RPC.

    Attributes:
     - session: session string (STRING, field 1)
     - widget_id: I64 widget identifier (field 2)
     - vega_json: Vega specification as a JSON string (STRING, field 3)
     - compression_level: I32, field 4 -- presumably image compression; confirm against the IDL
     - nonce: caller-supplied string tag (STRING, field 5)
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'session', 'UTF8', None, ), # 1
        (2, TType.I64, 'widget_id', None, None, ), # 2
        (3, TType.STRING, 'vega_json', 'UTF8', None, ), # 3
        (4, TType.I32, 'compression_level', None, None, ), # 4
        (5, TType.STRING, 'nonce', 'UTF8', None, ), # 5
    )
    def __init__(self, session=None, widget_id=None, vega_json=None, compression_level=None, nonce=None,):
        self.session = session
        self.widget_id = widget_id
        self.vega_json = vega_json
        self.compression_level = compression_level
        self.nonce = nonce
    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder driven by thrift_spec.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 returns bytes from readString(); decode to unicode there.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I64:
                    self.widget_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.vega_json = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.compression_level = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.nonce = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        # Fast path: C-accelerated encoder driven by thrift_spec.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('render_vega_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.widget_id is not None:
            oprot.writeFieldBegin('widget_id', TType.I64, 2)
            oprot.writeI64(self.widget_id)
            oprot.writeFieldEnd()
        if self.vega_json is not None:
            oprot.writeFieldBegin('vega_json', TType.STRING, 3)
            oprot.writeString(self.vega_json.encode('utf-8') if sys.version_info[0] == 2 else self.vega_json)
            oprot.writeFieldEnd()
        if self.compression_level is not None:
            oprot.writeFieldBegin('compression_level', TType.I32, 4)
            oprot.writeI32(self.compression_level)
            oprot.writeFieldEnd()
        if self.nonce is not None:
            oprot.writeFieldBegin('nonce', TType.STRING, 5)
            oprot.writeString(self.nonce.encode('utf-8') if sys.version_info[0] == 2 else self.nonce)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated no-op: no required-field constraints."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class render_vega_result(object):
    """Thrift result struct for the ``render_vega`` RPC.

    Attributes:
     - success: TRenderResult payload on success (field 0)
     - e: TMapDException raised by the server, if any (field 1)
    """
    # Field spec consumed by the accelerated (de)serializer; must match the IDL.
    thrift_spec = (
        (0, TType.STRUCT, 'success', (TRenderResult, TRenderResult.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ), # 1
    )
    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e
    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated decoder driven by thrift_spec.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TRenderResult()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct onto the output protocol *oprot*."""
        # Fast path: C-accelerated encoder driven by thrift_spec.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('render_vega_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated no-op: no required-field constraints."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_result_row_for_pixel_args(object):
    """
    Argument struct for the get_result_row_for_pixel RPC.

    Attributes:
     - session
     - widget_id
     - pixel
     - table_col_names
     - column_format
     - pixelRadius
     - nonce
    """
    # Generated field metadata: (field id, type, name, type spec, default); tuple index == field id.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'session', 'UTF8', None, ), # 1
        (2, TType.I64, 'widget_id', None, None, ), # 2
        (3, TType.STRUCT, 'pixel', (TPixel, TPixel.thrift_spec), None, ), # 3
        (4, TType.MAP, 'table_col_names', (TType.STRING, 'UTF8', TType.LIST, (TType.STRING, 'UTF8', False), False), None, ), # 4
        (5, TType.BOOL, 'column_format', None, None, ), # 5
        (6, TType.I32, 'pixelRadius', None, None, ), # 6
        (7, TType.STRING, 'nonce', 'UTF8', None, ), # 7
    )
    def __init__(self, session=None, widget_id=None, pixel=None, table_col_names=None, column_format=None, pixelRadius=None, nonce=None,):
        self.session = session
        self.widget_id = widget_id
        self.pixel = pixel
        self.table_col_names = table_col_names
        self.column_format = column_format
        self.pixelRadius = pixelRadius
        self.nonce = nonce
    def read(self, iprot):
        """Deserialize this struct from iprot; uses the C fast-decode path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, skipping any unknown/mistyped field.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Thrift STRING is bytes on Python 2; decode to unicode at the boundary.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I64:
                    self.widget_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.pixel = TPixel()
                    self.pixel.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.MAP:
                    # map<string, list<string>>: table name -> column names.
                    self.table_col_names = {}
                    (_ktype187, _vtype188, _size186) = iprot.readMapBegin()
                    for _i190 in range(_size186):
                        _key191 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val192 = []
                        (_etype196, _size193) = iprot.readListBegin()
                        for _i197 in range(_size193):
                            _elem198 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                            _val192.append(_elem198)
                        iprot.readListEnd()
                        self.table_col_names[_key191] = _val192
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.BOOL:
                    self.column_format = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.I32:
                    self.pixelRadius = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.STRING:
                    self.nonce = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; uses the C fast-encode path when available. None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_result_row_for_pixel_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            # Encode unicode to bytes on Python 2; Python 3 writes str directly.
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.widget_id is not None:
            oprot.writeFieldBegin('widget_id', TType.I64, 2)
            oprot.writeI64(self.widget_id)
            oprot.writeFieldEnd()
        if self.pixel is not None:
            oprot.writeFieldBegin('pixel', TType.STRUCT, 3)
            self.pixel.write(oprot)
            oprot.writeFieldEnd()
        if self.table_col_names is not None:
            oprot.writeFieldBegin('table_col_names', TType.MAP, 4)
            oprot.writeMapBegin(TType.STRING, TType.LIST, len(self.table_col_names))
            for kiter199, viter200 in self.table_col_names.items():
                oprot.writeString(kiter199.encode('utf-8') if sys.version_info[0] == 2 else kiter199)
                oprot.writeListBegin(TType.STRING, len(viter200))
                for iter201 in viter200:
                    oprot.writeString(iter201.encode('utf-8') if sys.version_info[0] == 2 else iter201)
                oprot.writeListEnd()
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.column_format is not None:
            oprot.writeFieldBegin('column_format', TType.BOOL, 5)
            oprot.writeBool(self.column_format)
            oprot.writeFieldEnd()
        if self.pixelRadius is not None:
            oprot.writeFieldBegin('pixelRadius', TType.I32, 6)
            oprot.writeI32(self.pixelRadius)
            oprot.writeFieldEnd()
        if self.nonce is not None:
            oprot.writeFieldBegin('nonce', TType.STRING, 7)
            oprot.writeString(self.nonce.encode('utf-8') if sys.version_info[0] == 2 else self.nonce)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated validator; this struct declares no required fields, so nothing to check."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_result_row_for_pixel_result(object):
    """
    Result wrapper for the get_result_row_for_pixel RPC (success value or declared exception).

    Attributes:
     - success
     - e
    """
    # Generated field metadata: (field id, type, name, type spec, default); tuple index == field id.
    thrift_spec = (
        (0, TType.STRUCT, 'success', (TPixelTableRowResult, TPixelTableRowResult.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ), # 1
    )
    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e
    def read(self, iprot):
        """Deserialize this struct from iprot; uses the C fast-decode path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, skipping any unknown/mistyped field.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TPixelTableRowResult()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; uses the C fast-encode path when available. None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_result_row_for_pixel_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated validator; this struct declares no required fields, so nothing to check."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_frontend_view_args(object):
    """
    Argument struct for the get_frontend_view RPC.

    Attributes:
     - session
     - view_name
    """
    # Generated field metadata: (field id, type, name, type spec, default); tuple index == field id.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'session', 'UTF8', None, ), # 1
        (2, TType.STRING, 'view_name', 'UTF8', None, ), # 2
    )
    def __init__(self, session=None, view_name=None,):
        self.session = session
        self.view_name = view_name
    def read(self, iprot):
        """Deserialize this struct from iprot; uses the C fast-decode path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, skipping any unknown/mistyped field.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Thrift STRING is bytes on Python 2; decode to unicode at the boundary.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.view_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; uses the C fast-encode path when available. None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_frontend_view_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.view_name is not None:
            oprot.writeFieldBegin('view_name', TType.STRING, 2)
            oprot.writeString(self.view_name.encode('utf-8') if sys.version_info[0] == 2 else self.view_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated validator; this struct declares no required fields, so nothing to check."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_frontend_view_result(object):
    """
    Result wrapper for the get_frontend_view RPC (success value or declared exception).

    Attributes:
     - success
     - e
    """
    # Generated field metadata: (field id, type, name, type spec, default); tuple index == field id.
    thrift_spec = (
        (0, TType.STRUCT, 'success', (TFrontendView, TFrontendView.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ), # 1
    )
    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e
    def read(self, iprot):
        """Deserialize this struct from iprot; uses the C fast-decode path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, skipping any unknown/mistyped field.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TFrontendView()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; uses the C fast-encode path when available. None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_frontend_view_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated validator; this struct declares no required fields, so nothing to check."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_frontend_views_args(object):
    """
    Argument struct for the get_frontend_views RPC.

    Attributes:
     - session
    """
    # Generated field metadata: (field id, type, name, type spec, default); tuple index == field id.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'session', 'UTF8', None, ), # 1
    )
    def __init__(self, session=None,):
        self.session = session
    def read(self, iprot):
        """Deserialize this struct from iprot; uses the C fast-decode path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, skipping any unknown/mistyped field.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Thrift STRING is bytes on Python 2; decode to unicode at the boundary.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; uses the C fast-encode path when available. None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_frontend_views_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated validator; this struct declares no required fields, so nothing to check."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_frontend_views_result(object):
    """
    Result wrapper for the get_frontend_views RPC (list of views or declared exception).

    Attributes:
     - success
     - e
    """
    # Generated field metadata: (field id, type, name, type spec, default); tuple index == field id.
    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRUCT, (TFrontendView, TFrontendView.thrift_spec), False), None, ), # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ), # 1
    )
    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e
    def read(self, iprot):
        """Deserialize this struct from iprot; uses the C fast-decode path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, skipping any unknown/mistyped field.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    # list<TFrontendView>
                    self.success = []
                    (_etype205, _size202) = iprot.readListBegin()
                    for _i206 in range(_size202):
                        _elem207 = TFrontendView()
                        _elem207.read(iprot)
                        self.success.append(_elem207)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; uses the C fast-encode path when available. None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_frontend_views_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for iter208 in self.success:
                iter208.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated validator; this struct declares no required fields, so nothing to check."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class create_frontend_view_args(object):
    """
    Argument struct for the create_frontend_view RPC.

    Attributes:
     - session
     - view_name
     - view_state
     - image_hash
     - view_metadata
    """
    # Generated field metadata: (field id, type, name, type spec, default); tuple index == field id.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'session', 'UTF8', None, ), # 1
        (2, TType.STRING, 'view_name', 'UTF8', None, ), # 2
        (3, TType.STRING, 'view_state', 'UTF8', None, ), # 3
        (4, TType.STRING, 'image_hash', 'UTF8', None, ), # 4
        (5, TType.STRING, 'view_metadata', 'UTF8', None, ), # 5
    )
    def __init__(self, session=None, view_name=None, view_state=None, image_hash=None, view_metadata=None,):
        self.session = session
        self.view_name = view_name
        self.view_state = view_state
        self.image_hash = image_hash
        self.view_metadata = view_metadata
    def read(self, iprot):
        """Deserialize this struct from iprot; uses the C fast-decode path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, skipping any unknown/mistyped field.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Thrift STRING is bytes on Python 2; decode to unicode at the boundary.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.view_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.view_state = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.image_hash = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRING:
                    self.view_metadata = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; uses the C fast-encode path when available. None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('create_frontend_view_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.view_name is not None:
            oprot.writeFieldBegin('view_name', TType.STRING, 2)
            oprot.writeString(self.view_name.encode('utf-8') if sys.version_info[0] == 2 else self.view_name)
            oprot.writeFieldEnd()
        if self.view_state is not None:
            oprot.writeFieldBegin('view_state', TType.STRING, 3)
            oprot.writeString(self.view_state.encode('utf-8') if sys.version_info[0] == 2 else self.view_state)
            oprot.writeFieldEnd()
        if self.image_hash is not None:
            oprot.writeFieldBegin('image_hash', TType.STRING, 4)
            oprot.writeString(self.image_hash.encode('utf-8') if sys.version_info[0] == 2 else self.image_hash)
            oprot.writeFieldEnd()
        if self.view_metadata is not None:
            oprot.writeFieldBegin('view_metadata', TType.STRING, 5)
            oprot.writeString(self.view_metadata.encode('utf-8') if sys.version_info[0] == 2 else self.view_metadata)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated validator; this struct declares no required fields, so nothing to check."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class create_frontend_view_result(object):
    """
    Result wrapper for the create_frontend_view RPC (void return; only a declared exception).

    Attributes:
     - e
    """
    # Generated field metadata: (field id, type, name, type spec, default); tuple index == field id.
    thrift_spec = (
        None, # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ), # 1
    )
    def __init__(self, e=None,):
        self.e = e
    def read(self, iprot):
        """Deserialize this struct from iprot; uses the C fast-decode path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, skipping any unknown/mistyped field.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; uses the C fast-encode path when available. None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('create_frontend_view_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated validator; this struct declares no required fields, so nothing to check."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class delete_frontend_view_args(object):
    """
    Argument struct for the delete_frontend_view RPC.

    Attributes:
     - session
     - view_name
    """
    # Generated field metadata: (field id, type, name, type spec, default); tuple index == field id.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'session', 'UTF8', None, ), # 1
        (2, TType.STRING, 'view_name', 'UTF8', None, ), # 2
    )
    def __init__(self, session=None, view_name=None,):
        self.session = session
        self.view_name = view_name
    def read(self, iprot):
        """Deserialize this struct from iprot; uses the C fast-decode path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, skipping any unknown/mistyped field.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Thrift STRING is bytes on Python 2; decode to unicode at the boundary.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.view_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; uses the C fast-encode path when available. None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('delete_frontend_view_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.view_name is not None:
            oprot.writeFieldBegin('view_name', TType.STRING, 2)
            oprot.writeString(self.view_name.encode('utf-8') if sys.version_info[0] == 2 else self.view_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated validator; this struct declares no required fields, so nothing to check."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class delete_frontend_view_result(object):
    """
    Result wrapper for the delete_frontend_view RPC (void return; only a declared exception).

    Attributes:
     - e
    """
    # Generated field metadata: (field id, type, name, type spec, default); tuple index == field id.
    thrift_spec = (
        None, # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ), # 1
    )
    def __init__(self, e=None,):
        self.e = e
    def read(self, iprot):
        """Deserialize this struct from iprot; uses the C fast-decode path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, skipping any unknown/mistyped field.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; uses the C fast-encode path when available. None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('delete_frontend_view_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated validator; this struct declares no required fields, so nothing to check."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_link_view_args(object):
    """
    Argument struct for the get_link_view RPC.

    Attributes:
     - session
     - link
    """
    # Generated field metadata: (field id, type, name, type spec, default); tuple index == field id.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'session', 'UTF8', None, ), # 1
        (2, TType.STRING, 'link', 'UTF8', None, ), # 2
    )
    def __init__(self, session=None, link=None,):
        self.session = session
        self.link = link
    def read(self, iprot):
        """Deserialize this struct from iprot; uses the C fast-decode path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, skipping any unknown/mistyped field.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Thrift STRING is bytes on Python 2; decode to unicode at the boundary.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.link = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; uses the C fast-encode path when available. None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_link_view_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.link is not None:
            oprot.writeFieldBegin('link', TType.STRING, 2)
            oprot.writeString(self.link.encode('utf-8') if sys.version_info[0] == 2 else self.link)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated validator; this struct declares no required fields, so nothing to check."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class get_link_view_result(object):
    """
    Result wrapper for the get_link_view RPC (success value or declared exception).

    Attributes:
     - success
     - e
    """
    # Generated field metadata: (field id, type, name, type spec, default); tuple index == field id.
    thrift_spec = (
        (0, TType.STRUCT, 'success', (TFrontendView, TFrontendView.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ), # 1
    )
    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e
    def read(self, iprot):
        """Deserialize this struct from iprot; uses the C fast-decode path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, skipping any unknown/mistyped field.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TFrontendView()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; uses the C fast-encode path when available. None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_link_view_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated validator; this struct declares no required fields, so nothing to check."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class create_link_args(object):
    """
    Argument struct for the create_link RPC.

    Attributes:
     - session
     - view_state
     - view_metadata
    """
    # Generated field metadata: (field id, type, name, type spec, default); tuple index == field id.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'session', 'UTF8', None, ), # 1
        (2, TType.STRING, 'view_state', 'UTF8', None, ), # 2
        (3, TType.STRING, 'view_metadata', 'UTF8', None, ), # 3
    )
    def __init__(self, session=None, view_state=None, view_metadata=None,):
        self.session = session
        self.view_state = view_state
        self.view_metadata = view_metadata
    def read(self, iprot):
        """Deserialize this struct from iprot; uses the C fast-decode path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, skipping any unknown/mistyped field.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Thrift STRING is bytes on Python 2; decode to unicode at the boundary.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.view_state = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.view_metadata = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; uses the C fast-encode path when available. None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('create_link_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.view_state is not None:
            oprot.writeFieldBegin('view_state', TType.STRING, 2)
            oprot.writeString(self.view_state.encode('utf-8') if sys.version_info[0] == 2 else self.view_state)
            oprot.writeFieldEnd()
        if self.view_metadata is not None:
            oprot.writeFieldBegin('view_metadata', TType.STRING, 3)
            oprot.writeString(self.view_metadata.encode('utf-8') if sys.version_info[0] == 2 else self.view_metadata)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated validator; this struct declares no required fields, so nothing to check."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class create_link_result(object):
    """
    Result wrapper for the create_link RPC (string result or declared exception).

    Attributes:
     - success
     - e
    """
    # Generated field metadata: (field id, type, name, type spec, default); tuple index == field id.
    thrift_spec = (
        (0, TType.STRING, 'success', 'UTF8', None, ), # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ), # 1
    )
    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e
    def read(self, iprot):
        """Deserialize this struct from iprot; uses the C fast-decode path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk fields until STOP, skipping any unknown/mistyped field.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    # Thrift STRING is bytes on Python 2; decode to unicode at the boundary.
                    self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        """Serialize this struct to oprot; uses the C fast-encode path when available. None fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('create_link_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        """Generated validator; this struct declares no required fields, so nothing to check."""
        return
    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class load_table_binary_args(object):
    """Thrift-generated argument struct for the ``load_table_binary`` RPC.

    Attributes:
     - session
     - table_name
     - rows
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'table_name', 'UTF8', None, ),  # 2
        (3, TType.LIST, 'rows', (TType.STRUCT, (TRow, TRow.thrift_spec), False), None, ),  # 3
    )

    def __init__(self, session=None, table_name=None, rows=None,):
        self.session = session
        self.table_name = table_name
        self.rows = rows

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # C-accelerated fast path when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Decode bytes to unicode on Python 2 only.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.table_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.LIST:
                    # Read a list of TRow structs.
                    self.rows = []
                    (_etype212, _size209) = iprot.readListBegin()
                    for _i213 in range(_size209):
                        _elem214 = TRow()
                        _elem214.read(iprot)
                        self.rows.append(_elem214)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # C-accelerated fast path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('load_table_binary_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.table_name is not None:
            oprot.writeFieldBegin('table_name', TType.STRING, 2)
            oprot.writeString(self.table_name.encode('utf-8') if sys.version_info[0] == 2 else self.table_name)
            oprot.writeFieldEnd()
        if self.rows is not None:
            oprot.writeFieldBegin('rows', TType.LIST, 3)
            oprot.writeListBegin(TType.STRUCT, len(self.rows))
            for iter215 in self.rows:
                iter215.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required-field validation for this generated struct."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__.
        return not (self == other)
class load_table_binary_result(object):
    """Thrift-generated result struct for the ``load_table_binary`` RPC.

    Attributes:
     - e
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # C-accelerated fast path when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # C-accelerated fast path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('load_table_binary_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required-field validation for this generated struct."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__.
        return not (self == other)
class load_table_args(object):
    """Thrift-generated argument struct for the ``load_table`` RPC.

    Attributes:
     - session
     - table_name
     - rows
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'table_name', 'UTF8', None, ),  # 2
        (3, TType.LIST, 'rows', (TType.STRUCT, (TStringRow, TStringRow.thrift_spec), False), None, ),  # 3
    )

    def __init__(self, session=None, table_name=None, rows=None,):
        self.session = session
        self.table_name = table_name
        self.rows = rows

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # C-accelerated fast path when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Decode bytes to unicode on Python 2 only.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.table_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.LIST:
                    # Read a list of TStringRow structs.
                    self.rows = []
                    (_etype219, _size216) = iprot.readListBegin()
                    for _i220 in range(_size216):
                        _elem221 = TStringRow()
                        _elem221.read(iprot)
                        self.rows.append(_elem221)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # C-accelerated fast path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('load_table_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.table_name is not None:
            oprot.writeFieldBegin('table_name', TType.STRING, 2)
            oprot.writeString(self.table_name.encode('utf-8') if sys.version_info[0] == 2 else self.table_name)
            oprot.writeFieldEnd()
        if self.rows is not None:
            oprot.writeFieldBegin('rows', TType.LIST, 3)
            oprot.writeListBegin(TType.STRUCT, len(self.rows))
            for iter222 in self.rows:
                iter222.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required-field validation for this generated struct."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__.
        return not (self == other)
class load_table_result(object):
    """Thrift-generated result struct for the ``load_table`` RPC.

    Attributes:
     - e
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # C-accelerated fast path when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # C-accelerated fast path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('load_table_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required-field validation for this generated struct."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__.
        return not (self == other)
class detect_column_types_args(object):
    """Thrift-generated argument struct for the ``detect_column_types`` RPC.

    Attributes:
     - session
     - file_name
     - copy_params
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'file_name', 'UTF8', None, ),  # 2
        (3, TType.STRUCT, 'copy_params', (TCopyParams, TCopyParams.thrift_spec), None, ),  # 3
    )

    def __init__(self, session=None, file_name=None, copy_params=None,):
        self.session = session
        self.file_name = file_name
        self.copy_params = copy_params

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # C-accelerated fast path when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Decode bytes to unicode on Python 2 only.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.file_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.copy_params = TCopyParams()
                    self.copy_params.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # C-accelerated fast path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('detect_column_types_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.file_name is not None:
            oprot.writeFieldBegin('file_name', TType.STRING, 2)
            oprot.writeString(self.file_name.encode('utf-8') if sys.version_info[0] == 2 else self.file_name)
            oprot.writeFieldEnd()
        if self.copy_params is not None:
            oprot.writeFieldBegin('copy_params', TType.STRUCT, 3)
            self.copy_params.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required-field validation for this generated struct."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__.
        return not (self == other)
class detect_column_types_result(object):
    """Thrift-generated result struct for the ``detect_column_types`` RPC.

    Attributes:
     - success
     - e
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TDetectResult, TDetectResult.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # C-accelerated fast path when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    # Field 0 is the RPC return value.
                    self.success = TDetectResult()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # C-accelerated fast path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('detect_column_types_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required-field validation for this generated struct."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__.
        return not (self == other)
class create_table_args(object):
    """Thrift-generated argument struct for the ``create_table`` RPC.

    Attributes:
     - session
     - table_name
     - row_desc
     - table_type
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'table_name', 'UTF8', None, ),  # 2
        (3, TType.LIST, 'row_desc', (TType.STRUCT, (TColumnType, TColumnType.thrift_spec), False), None, ),  # 3
        (4, TType.I32, 'table_type', None, 0, ),  # 4
    )

    # table_type defaults to the IDL default stored in thrift_spec (0 here).
    def __init__(self, session=None, table_name=None, row_desc=None, table_type=thrift_spec[4][4],):
        self.session = session
        self.table_name = table_name
        self.row_desc = row_desc
        self.table_type = table_type

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # C-accelerated fast path when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Decode bytes to unicode on Python 2 only.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.table_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.LIST:
                    # Read a list of TColumnType structs.
                    self.row_desc = []
                    (_etype226, _size223) = iprot.readListBegin()
                    for _i227 in range(_size223):
                        _elem228 = TColumnType()
                        _elem228.read(iprot)
                        self.row_desc.append(_elem228)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.table_type = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # C-accelerated fast path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('create_table_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.table_name is not None:
            oprot.writeFieldBegin('table_name', TType.STRING, 2)
            oprot.writeString(self.table_name.encode('utf-8') if sys.version_info[0] == 2 else self.table_name)
            oprot.writeFieldEnd()
        if self.row_desc is not None:
            oprot.writeFieldBegin('row_desc', TType.LIST, 3)
            oprot.writeListBegin(TType.STRUCT, len(self.row_desc))
            for iter229 in self.row_desc:
                iter229.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.table_type is not None:
            oprot.writeFieldBegin('table_type', TType.I32, 4)
            oprot.writeI32(self.table_type)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required-field validation for this generated struct."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__.
        return not (self == other)
class create_table_result(object):
    """Thrift-generated result struct for the ``create_table`` RPC.

    Attributes:
     - e
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # C-accelerated fast path when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # C-accelerated fast path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('create_table_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required-field validation for this generated struct."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__.
        return not (self == other)
class import_table_args(object):
    """Thrift-generated argument struct for the ``import_table`` RPC.

    Attributes:
     - session
     - table_name
     - file_name
     - copy_params
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'table_name', 'UTF8', None, ),  # 2
        (3, TType.STRING, 'file_name', 'UTF8', None, ),  # 3
        (4, TType.STRUCT, 'copy_params', (TCopyParams, TCopyParams.thrift_spec), None, ),  # 4
    )

    def __init__(self, session=None, table_name=None, file_name=None, copy_params=None,):
        self.session = session
        self.table_name = table_name
        self.file_name = file_name
        self.copy_params = copy_params

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # C-accelerated fast path when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Decode bytes to unicode on Python 2 only.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.table_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.file_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.copy_params = TCopyParams()
                    self.copy_params.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # C-accelerated fast path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('import_table_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.table_name is not None:
            oprot.writeFieldBegin('table_name', TType.STRING, 2)
            oprot.writeString(self.table_name.encode('utf-8') if sys.version_info[0] == 2 else self.table_name)
            oprot.writeFieldEnd()
        if self.file_name is not None:
            oprot.writeFieldBegin('file_name', TType.STRING, 3)
            oprot.writeString(self.file_name.encode('utf-8') if sys.version_info[0] == 2 else self.file_name)
            oprot.writeFieldEnd()
        if self.copy_params is not None:
            oprot.writeFieldBegin('copy_params', TType.STRUCT, 4)
            self.copy_params.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required-field validation for this generated struct."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__.
        return not (self == other)
class import_table_result(object):
    """Thrift-generated result struct for the ``import_table`` RPC.

    Attributes:
     - e
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # C-accelerated fast path when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # C-accelerated fast path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('import_table_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required-field validation for this generated struct."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__.
        return not (self == other)
class import_geo_table_args(object):
    """Thrift-generated argument struct for the ``import_geo_table`` RPC.

    Attributes:
     - session
     - table_name
     - file_name
     - copy_params
     - row_desc
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'table_name', 'UTF8', None, ),  # 2
        (3, TType.STRING, 'file_name', 'UTF8', None, ),  # 3
        (4, TType.STRUCT, 'copy_params', (TCopyParams, TCopyParams.thrift_spec), None, ),  # 4
        (5, TType.LIST, 'row_desc', (TType.STRUCT, (TColumnType, TColumnType.thrift_spec), False), None, ),  # 5
    )

    def __init__(self, session=None, table_name=None, file_name=None, copy_params=None, row_desc=None,):
        self.session = session
        self.table_name = table_name
        self.file_name = file_name
        self.copy_params = copy_params
        self.row_desc = row_desc

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # C-accelerated fast path when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Decode bytes to unicode on Python 2 only.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.table_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.file_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.copy_params = TCopyParams()
                    self.copy_params.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.LIST:
                    # Read a list of TColumnType structs.
                    self.row_desc = []
                    (_etype233, _size230) = iprot.readListBegin()
                    for _i234 in range(_size230):
                        _elem235 = TColumnType()
                        _elem235.read(iprot)
                        self.row_desc.append(_elem235)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # C-accelerated fast path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('import_geo_table_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.table_name is not None:
            oprot.writeFieldBegin('table_name', TType.STRING, 2)
            oprot.writeString(self.table_name.encode('utf-8') if sys.version_info[0] == 2 else self.table_name)
            oprot.writeFieldEnd()
        if self.file_name is not None:
            oprot.writeFieldBegin('file_name', TType.STRING, 3)
            oprot.writeString(self.file_name.encode('utf-8') if sys.version_info[0] == 2 else self.file_name)
            oprot.writeFieldEnd()
        if self.copy_params is not None:
            oprot.writeFieldBegin('copy_params', TType.STRUCT, 4)
            self.copy_params.write(oprot)
            oprot.writeFieldEnd()
        if self.row_desc is not None:
            oprot.writeFieldBegin('row_desc', TType.LIST, 5)
            oprot.writeListBegin(TType.STRUCT, len(self.row_desc))
            for iter236 in self.row_desc:
                iter236.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required-field validation for this generated struct."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__.
        return not (self == other)
class import_geo_table_result(object):
    """Thrift-generated result struct for the ``import_geo_table`` RPC.

    Attributes:
     - e
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # C-accelerated fast path when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # C-accelerated fast path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('import_geo_table_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required-field validation for this generated struct."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__.
        return not (self == other)
class import_table_status_args(object):
    """Thrift-generated argument struct for the ``import_table_status`` RPC.

    Attributes:
     - session
     - import_id
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'import_id', 'UTF8', None, ),  # 2
    )

    def __init__(self, session=None, import_id=None,):
        self.session = session
        self.import_id = import_id

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # C-accelerated fast path when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Decode bytes to unicode on Python 2 only.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.import_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # C-accelerated fast path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('import_table_status_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.import_id is not None:
            oprot.writeFieldBegin('import_id', TType.STRING, 2)
            oprot.writeString(self.import_id.encode('utf-8') if sys.version_info[0] == 2 else self.import_id)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required-field validation for this generated struct."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__.
        return not (self == other)
class import_table_status_result(object):
    """Thrift-generated result struct for the ``import_table_status`` RPC.

    Attributes:
     - success
     - e
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TImportStatus, TImportStatus.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # C-accelerated fast path when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    # Field 0 is the RPC return value.
                    self.success = TImportStatus()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # C-accelerated fast path when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('import_table_status_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required-field validation for this generated struct."""
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__.
        return not (self == other)
class start_query_args(object):
"""
Attributes:
- session
- query_ra
- just_explain
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'session', 'UTF8', None, ), # 1
(2, TType.STRING, 'query_ra', 'UTF8', None, ), # 2
(3, TType.BOOL, 'just_explain', None, None, ), # 3
)
def __init__(self, session=None, query_ra=None, just_explain=None,):
self.session = session
self.query_ra = query_ra
self.just_explain = just_explain
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.query_ra = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.BOOL:
self.just_explain = iprot.readBool()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('start_query_args')
if self.session is not None:
oprot.writeFieldBegin('session', TType.STRING, 1)
oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
oprot.writeFieldEnd()
if self.query_ra is not None:
oprot.writeFieldBegin('query_ra', TType.STRING, 2)
oprot.writeString(self.query_ra.encode('utf-8') if sys.version_info[0] == 2 else self.query_ra)
oprot.writeFieldEnd()
if self.just_explain is not None:
oprot.writeFieldBegin('just_explain', TType.BOOL, 3)
oprot.writeBool(self.just_explain)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
    # Defined explicitly for Python 2 compatibility.
    return not (self == other)
class start_query_result(object):
    """
    Thrift-generated result struct for the start_query RPC.

    Attributes:
     - success
     - e
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TPendingQuery, TPendingQuery.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-extension decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TPendingQuery()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('start_query_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to check.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class execute_first_step_args(object):
    """
    Thrift-generated argument struct for the execute_first_step RPC.

    Attributes:
     - pending_query
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'pending_query', (TPendingQuery, TPendingQuery.thrift_spec), None, ),  # 1
    )

    def __init__(self, pending_query=None,):
        self.pending_query = pending_query

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.pending_query = TPendingQuery()
                    self.pending_query.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('execute_first_step_args')
        if self.pending_query is not None:
            oprot.writeFieldBegin('pending_query', TType.STRUCT, 1)
            self.pending_query.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class execute_first_step_result(object):
    """
    Thrift-generated result struct for the execute_first_step RPC.

    Attributes:
     - success
     - e
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TStepResult, TStepResult.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TStepResult()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('execute_first_step_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class broadcast_serialized_rows_args(object):
    """
    Thrift-generated argument struct for the broadcast_serialized_rows RPC.

    Attributes:
     - serialized_rows
     - row_desc
     - query_id
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'serialized_rows', 'UTF8', None, ),  # 1
        (2, TType.LIST, 'row_desc', (TType.STRUCT, (TColumnType, TColumnType.thrift_spec), False), None, ),  # 2
        (3, TType.I64, 'query_id', None, None, ),  # 3
    )

    def __init__(self, serialized_rows=None, row_desc=None, query_id=None,):
        self.serialized_rows = serialized_rows
        self.row_desc = row_desc
        self.query_id = query_id

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.serialized_rows = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.LIST:
                    # List of TColumnType structs.
                    self.row_desc = []
                    (_etype240, _size237) = iprot.readListBegin()
                    for _i241 in range(_size237):
                        _elem242 = TColumnType()
                        _elem242.read(iprot)
                        self.row_desc.append(_elem242)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I64:
                    self.query_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('broadcast_serialized_rows_args')
        if self.serialized_rows is not None:
            oprot.writeFieldBegin('serialized_rows', TType.STRING, 1)
            oprot.writeString(self.serialized_rows.encode('utf-8') if sys.version_info[0] == 2 else self.serialized_rows)
            oprot.writeFieldEnd()
        if self.row_desc is not None:
            oprot.writeFieldBegin('row_desc', TType.LIST, 2)
            oprot.writeListBegin(TType.STRUCT, len(self.row_desc))
            for iter243 in self.row_desc:
                iter243.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.query_id is not None:
            oprot.writeFieldBegin('query_id', TType.I64, 3)
            oprot.writeI64(self.query_id)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class broadcast_serialized_rows_result(object):
    """
    Thrift-generated result struct for the broadcast_serialized_rows RPC.

    Attributes:
     - e
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('broadcast_serialized_rows_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class render_vega_raw_pixels_args(object):
    """
    Thrift-generated argument struct for the render_vega_raw_pixels RPC.

    Attributes:
     - session
     - widget_id
     - node_idx
     - vega_json
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.I64, 'widget_id', None, None, ),  # 2
        (3, TType.I16, 'node_idx', None, None, ),  # 3
        (4, TType.STRING, 'vega_json', 'UTF8', None, ),  # 4
    )

    def __init__(self, session=None, widget_id=None, node_idx=None, vega_json=None,):
        self.session = session
        self.widget_id = widget_id
        self.node_idx = node_idx
        self.vega_json = vega_json

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I64:
                    self.widget_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I16:
                    self.node_idx = iprot.readI16()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.vega_json = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('render_vega_raw_pixels_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.widget_id is not None:
            oprot.writeFieldBegin('widget_id', TType.I64, 2)
            oprot.writeI64(self.widget_id)
            oprot.writeFieldEnd()
        if self.node_idx is not None:
            oprot.writeFieldBegin('node_idx', TType.I16, 3)
            oprot.writeI16(self.node_idx)
            oprot.writeFieldEnd()
        if self.vega_json is not None:
            oprot.writeFieldBegin('vega_json', TType.STRING, 4)
            oprot.writeString(self.vega_json.encode('utf-8') if sys.version_info[0] == 2 else self.vega_json)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class render_vega_raw_pixels_result(object):
    """
    Thrift-generated result struct for the render_vega_raw_pixels RPC.

    Attributes:
     - success
     - e
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TRawPixelDataResult, TRawPixelDataResult.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TRawPixelDataResult()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('render_vega_raw_pixels_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class insert_data_args(object):
    """
    Thrift-generated argument struct for the insert_data RPC.

    Attributes:
     - session
     - insert_data
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.STRUCT, 'insert_data', (TInsertData, TInsertData.thrift_spec), None, ),  # 2
    )

    def __init__(self, session=None, insert_data=None,):
        self.session = session
        self.insert_data = insert_data

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.insert_data = TInsertData()
                    self.insert_data.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('insert_data_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.insert_data is not None:
            oprot.writeFieldBegin('insert_data', TType.STRUCT, 2)
            self.insert_data.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class insert_data_result(object):
    """
    Thrift-generated result struct for the insert_data RPC.

    Attributes:
     - e
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('insert_data_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_table_descriptor_args(object):
    """
    Thrift-generated argument struct for the get_table_descriptor RPC.

    Attributes:
     - session
     - table_name
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'table_name', 'UTF8', None, ),  # 2
    )

    def __init__(self, session=None, table_name=None,):
        self.session = session
        self.table_name = table_name

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.table_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_table_descriptor_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.table_name is not None:
            oprot.writeFieldBegin('table_name', TType.STRING, 2)
            oprot.writeString(self.table_name.encode('utf-8') if sys.version_info[0] == 2 else self.table_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_table_descriptor_result(object):
    """
    Thrift-generated result struct for the get_table_descriptor RPC.
    success is a map of string -> TColumnType.

    Attributes:
     - success
     - e
    """

    thrift_spec = (
        (0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.STRUCT, (TColumnType, TColumnType.thrift_spec), False), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.MAP:
                    # Map of string keys to TColumnType struct values.
                    self.success = {}
                    (_ktype245, _vtype246, _size244) = iprot.readMapBegin()
                    for _i248 in range(_size244):
                        _key249 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        _val250 = TColumnType()
                        _val250.read(iprot)
                        self.success[_key249] = _val250
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_table_descriptor_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.MAP, 0)
            oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.success))
            for kiter251, viter252 in self.success.items():
                oprot.writeString(kiter251.encode('utf-8') if sys.version_info[0] == 2 else kiter251)
                viter252.write(oprot)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_row_descriptor_args(object):
    """
    Thrift-generated argument struct for the get_row_descriptor RPC.

    Attributes:
     - session
     - table_name
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'table_name', 'UTF8', None, ),  # 2
    )

    def __init__(self, session=None, table_name=None,):
        self.session = session
        self.table_name = table_name

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.table_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_row_descriptor_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.table_name is not None:
            oprot.writeFieldBegin('table_name', TType.STRING, 2)
            oprot.writeString(self.table_name.encode('utf-8') if sys.version_info[0] == 2 else self.table_name)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_row_descriptor_result(object):
    """
    Thrift-generated result struct for the get_row_descriptor RPC.
    success is a list of TColumnType.

    Attributes:
     - success
     - e
    """

    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRUCT, (TColumnType, TColumnType.thrift_spec), False), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    (_etype256, _size253) = iprot.readListBegin()
                    for _i257 in range(_size253):
                        _elem258 = TColumnType()
                        _elem258.read(iprot)
                        self.success.append(_elem258)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_row_descriptor_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for iter259 in self.success:
                iter259.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class render_args(object):
    """
    Thrift-generated argument struct for the render RPC.

    Attributes:
     - session
     - query
     - render_type
     - nonce
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.STRING, 'query', 'UTF8', None, ),  # 2
        (3, TType.STRING, 'render_type', 'UTF8', None, ),  # 3
        (4, TType.STRING, 'nonce', 'UTF8', None, ),  # 4
    )

    def __init__(self, session=None, query=None, render_type=None, nonce=None,):
        self.session = session
        self.query = query
        self.render_type = render_type
        self.nonce = nonce

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.query = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.render_type = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.nonce = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('render_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.query is not None:
            oprot.writeFieldBegin('query', TType.STRING, 2)
            oprot.writeString(self.query.encode('utf-8') if sys.version_info[0] == 2 else self.query)
            oprot.writeFieldEnd()
        if self.render_type is not None:
            oprot.writeFieldBegin('render_type', TType.STRING, 3)
            oprot.writeString(self.render_type.encode('utf-8') if sys.version_info[0] == 2 else self.render_type)
            oprot.writeFieldEnd()
        if self.nonce is not None:
            oprot.writeFieldBegin('nonce', TType.STRING, 4)
            oprot.writeString(self.nonce.encode('utf-8') if sys.version_info[0] == 2 else self.nonce)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class render_result(object):
    """
    Thrift-generated result struct for the render RPC.

    Attributes:
     - success
     - e
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TRenderResult, TRenderResult.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TRenderResult()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('render_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_rows_for_pixels_args(object):
"""
Attributes:
- session
- widget_id
- pixels
- table_name
- col_names
- column_format
- nonce
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'session', 'UTF8', None, ), # 1
(2, TType.I64, 'widget_id', None, None, ), # 2
(3, TType.LIST, 'pixels', (TType.STRUCT, (TPixel, TPixel.thrift_spec), False), None, ), # 3
(4, TType.STRING, 'table_name', 'UTF8', None, ), # 4
(5, TType.LIST, 'col_names', (TType.STRING, 'UTF8', False), None, ), # 5
(6, TType.BOOL, 'column_format', None, None, ), # 6
(7, TType.STRING, 'nonce', 'UTF8', None, ), # 7
)
def __init__(self, session=None, widget_id=None, pixels=None, table_name=None, col_names=None, column_format=None, nonce=None,):
    # All fields are optional and default to None (unset on the wire).
    self.session = session
    self.widget_id = widget_id
    self.pixels = pixels
    self.table_name = table_name
    self.col_names = col_names
    self.column_format = column_format
    self.nonce = nonce
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.widget_id = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.pixels = []
(_etype263, _size260) = iprot.readListBegin()
for _i264 in range(_size260):
_elem265 = TPixel()
_elem265.read(iprot)
self.pixels.append(_elem265)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.table_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.LIST:
self.col_names = []
(_etype269, _size266) = iprot.readListBegin()
for _i270 in range(_size266):
_elem271 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.col_names.append(_elem271)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.BOOL:
self.column_format = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 7:
if ftype == TType.STRING:
self.nonce = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('get_rows_for_pixels_args')
if self.session is not None:
oprot.writeFieldBegin('session', TType.STRING, 1)
oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
oprot.writeFieldEnd()
if self.widget_id is not None:
oprot.writeFieldBegin('widget_id', TType.I64, 2)
oprot.writeI64(self.widget_id)
oprot.writeFieldEnd()
if self.pixels is not None:
oprot.writeFieldBegin('pixels', TType.LIST, 3)
oprot.writeListBegin(TType.STRUCT, len(self.pixels))
for iter272 in self.pixels:
iter272.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.table_name is not None:
oprot.writeFieldBegin('table_name', TType.STRING, 4)
oprot.writeString(self.table_name.encode('utf-8') if sys.version_info[0] == 2 else self.table_name)
oprot.writeFieldEnd()
if self.col_names is not None:
oprot.writeFieldBegin('col_names', TType.LIST, 5)
oprot.writeListBegin(TType.STRING, len(self.col_names))
for iter273 in self.col_names:
oprot.writeString(iter273.encode('utf-8') if sys.version_info[0] == 2 else iter273)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.column_format is not None:
oprot.writeFieldBegin('column_format', TType.BOOL, 6)
oprot.writeBool(self.column_format)
oprot.writeFieldEnd()
if self.nonce is not None:
oprot.writeFieldBegin('nonce', TType.STRING, 7)
oprot.writeString(self.nonce.encode('utf-8') if sys.version_info[0] == 2 else self.nonce)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class get_rows_for_pixels_result(object):
    """
    Thrift result struct for the get_rows_for_pixels RPC: either a
    TPixelResult payload or a TMapDException raised by the server.

    Attributes:
     - success
     - e
    """

    # (field id, wire type, name, type args, default); slot 0 is the return value.
    thrift_spec = (
        (0, TType.STRUCT, 'success', (TPixelResult, TPixelResult.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize from *iprot*, using the C fast path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TPixelResult()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to *oprot*; only fields that are set are written."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_rows_for_pixels_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_row_for_pixel_args(object):
    """
    Thrift argument struct for the get_row_for_pixel RPC (single-pixel
    variant of get_rows_for_pixels, with a pixelRadius search window).

    Attributes:
     - session
     - widget_id
     - pixel
     - table_name
     - col_names
     - column_format
     - pixelRadius
     - nonce
    """

    # (field id, wire type, name, type args, default) per field slot; slot 0 unused.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'session', 'UTF8', None, ),  # 1
        (2, TType.I64, 'widget_id', None, None, ),  # 2
        (3, TType.STRUCT, 'pixel', (TPixel, TPixel.thrift_spec), None, ),  # 3
        (4, TType.STRING, 'table_name', 'UTF8', None, ),  # 4
        (5, TType.LIST, 'col_names', (TType.STRING, 'UTF8', False), None, ),  # 5
        (6, TType.BOOL, 'column_format', None, None, ),  # 6
        (7, TType.I32, 'pixelRadius', None, None, ),  # 7
        (8, TType.STRING, 'nonce', 'UTF8', None, ),  # 8
    )

    def __init__(self, session=None, widget_id=None, pixel=None, table_name=None, col_names=None, column_format=None, pixelRadius=None, nonce=None,):
        self.session = session
        self.widget_id = widget_id
        self.pixel = pixel
        self.table_name = table_name
        self.col_names = col_names
        self.column_format = column_format
        self.pixelRadius = pixelRadius
        self.nonce = nonce

    def read(self, iprot):
        """Deserialize this struct from *iprot*; unrecognized fields are skipped."""
        # Fast path: C-accelerated decoder when the transport supports it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    # Python 2 readString() returns bytes; decode to unicode there only.
                    self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I64:
                    self.widget_id = iprot.readI64()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.pixel = TPixel()
                    self.pixel.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.table_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.LIST:
                    # List of UTF-8 column names.
                    self.col_names = []
                    (_etype277, _size274) = iprot.readListBegin()
                    for _i278 in range(_size274):
                        _elem279 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                        self.col_names.append(_elem279)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.BOOL:
                    self.column_format = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 7:
                if ftype == TType.I32:
                    self.pixelRadius = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 8:
                if ftype == TType.STRING:
                    self.nonce = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; only fields that are set are written."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_row_for_pixel_args')
        if self.session is not None:
            oprot.writeFieldBegin('session', TType.STRING, 1)
            oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
            oprot.writeFieldEnd()
        if self.widget_id is not None:
            oprot.writeFieldBegin('widget_id', TType.I64, 2)
            oprot.writeI64(self.widget_id)
            oprot.writeFieldEnd()
        if self.pixel is not None:
            oprot.writeFieldBegin('pixel', TType.STRUCT, 3)
            self.pixel.write(oprot)
            oprot.writeFieldEnd()
        if self.table_name is not None:
            oprot.writeFieldBegin('table_name', TType.STRING, 4)
            oprot.writeString(self.table_name.encode('utf-8') if sys.version_info[0] == 2 else self.table_name)
            oprot.writeFieldEnd()
        if self.col_names is not None:
            oprot.writeFieldBegin('col_names', TType.LIST, 5)
            oprot.writeListBegin(TType.STRING, len(self.col_names))
            for iter280 in self.col_names:
                oprot.writeString(iter280.encode('utf-8') if sys.version_info[0] == 2 else iter280)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.column_format is not None:
            oprot.writeFieldBegin('column_format', TType.BOOL, 6)
            oprot.writeBool(self.column_format)
            oprot.writeFieldEnd()
        if self.pixelRadius is not None:
            oprot.writeFieldBegin('pixelRadius', TType.I32, 7)
            oprot.writeI32(self.pixelRadius)
            oprot.writeFieldEnd()
        if self.nonce is not None:
            oprot.writeFieldBegin('nonce', TType.STRING, 8)
            oprot.writeString(self.nonce.encode('utf-8') if sys.version_info[0] == 2 else self.nonce)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_row_for_pixel_result(object):
    """
    Thrift result struct for the get_row_for_pixel RPC: either a
    TPixelRowResult payload or a TMapDException raised by the server.

    Attributes:
     - success
     - e
    """

    # (field id, wire type, name, type args, default); slot 0 is the return value.
    thrift_spec = (
        (0, TType.STRUCT, 'success', (TPixelRowResult, TPixelRowResult.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize from *iprot*, using the C fast path when available."""
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TPixelRowResult()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TMapDException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to *oprot*; only fields that are set are written."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_row_for_pixel_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated structs perform no field validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
| 35.561828
| 185
| 0.578564
| 40,056
| 367,247
| 5.050355
| 0.009487
| 0.016239
| 0.029229
| 0.026338
| 0.947127
| 0.921576
| 0.899272
| 0.876138
| 0.860003
| 0.845035
| 0
| 0.009091
| 0.316776
| 367,247
| 10,326
| 186
| 35.565272
| 0.797156
| 0.021819
| 0
| 0.862379
| 1
| 0
| 0.031523
| 0.004446
| 0
| 0
| 0
| 0
| 0
| 1
| 0.110073
| false
| 0.007379
| 0.009224
| 0.034682
| 0.22199
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
aaede6b936f4a1d4cf1ff53250099eb4bd634ad6
| 252
|
py
|
Python
|
Codewars/8kyu/regexp-basics-is-it-a-digit/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/8kyu/regexp-basics-is-it-a-digit/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/8kyu/regexp-basics-is-it-a-digit/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 3.6.0
# Codewars sample tests for the `is_digit` kata: the function should return
# True only for a string that is exactly one decimal digit character.
# `Test` is the Codewars test framework, provided by the judge at runtime.
Test.describe('Sample tests')
Test.assert_equals(is_digit(''), False)
Test.assert_equals(is_digit('7'), True)
Test.assert_equals(is_digit(' '), False)
Test.assert_equals(is_digit('a'), False)
Test.assert_equals(is_digit('a5'), False)
| 28
| 41
| 0.734127
| 41
| 252
| 4.268293
| 0.414634
| 0.285714
| 0.457143
| 0.514286
| 0.742857
| 0.742857
| 0.582857
| 0.582857
| 0.582857
| 0.582857
| 0
| 0.021368
| 0.071429
| 252
| 8
| 42
| 31.5
| 0.726496
| 0.055556
| 0
| 0.333333
| 0
| 0
| 0.072034
| 0
| 0
| 0
| 0
| 0
| 0.833333
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c934aae28bab65f2b49ad6e616f3528b486bc7ac
| 123
|
py
|
Python
|
saefportal/analyzer/datastore/__init__.py
|
harry-consulting/SAEF
|
12ef43bbcc3178b8a988e21c1bef035881cf6e6d
|
[
"BSD-2-Clause"
] | 4
|
2020-12-16T13:14:26.000Z
|
2022-03-26T08:54:12.000Z
|
saefportal/analyzer/datastore/__init__.py
|
harry-consulting/SAEF
|
12ef43bbcc3178b8a988e21c1bef035881cf6e6d
|
[
"BSD-2-Clause"
] | 1
|
2022-03-26T09:09:04.000Z
|
2022-03-26T09:09:04.000Z
|
saefportal/analyzer/datastore/__init__.py
|
harry-consulting/SAEF
|
12ef43bbcc3178b8a988e21c1bef035881cf6e6d
|
[
"BSD-2-Clause"
] | 1
|
2020-12-16T13:20:17.000Z
|
2020-12-16T13:20:17.000Z
|
from .datastore import *
from .datastore_factory import *
from .datastore_postgres import *
from .datastore_azure import *
| 24.6
| 33
| 0.804878
| 15
| 123
| 6.4
| 0.4
| 0.541667
| 0.59375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130081
| 123
| 4
| 34
| 30.75
| 0.897196
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c93c4b010a27f08afbdd5c82436cae017bc4fc57
| 13,105
|
py
|
Python
|
tokenization/tag_encoding.py
|
ikergarcia1996/Cross-lingual-Annotation-Projection-
|
dbd919ad040c3f435a0f3846ec035f9852ea4ba6
|
[
"Apache-2.0"
] | 1
|
2022-03-18T21:46:07.000Z
|
2022-03-18T21:46:07.000Z
|
tokenization/tag_encoding.py
|
ikergarcia1996/Cross-lingual-Annotation-Projection
|
dbd919ad040c3f435a0f3846ec035f9852ea4ba6
|
[
"Apache-2.0"
] | null | null | null |
tokenization/tag_encoding.py
|
ikergarcia1996/Cross-lingual-Annotation-Projection
|
dbd919ad040c3f435a0f3846ec035f9852ea4ba6
|
[
"Apache-2.0"
] | null | null | null |
from typing import TextIO, List
import os
import argparse
def to_IOB_encoding(input_path: str, output_path: str, block_size=65536) -> None:
    """Rewrite a tagged dataset from IOB2 or BILOU encoding to IOB.

    Reads ``word TAG`` lines (one token per line, blank line between
    sentences) from *input_path* in chunks of roughly *block_size* bytes
    and writes the re-encoded lines to *output_path*.

    Raises:
        ValueError: if a line does not split into exactly two fields, or a
            non-O tag does not split into boundary and type on ``-``.
    """
    # From IOB2 or BILOU
    prev_tag_b: str = "O"  # boundary part (B/I/L/U/O) of the previous token's tag
    prev_tag_t: str = ""   # type part (e.g. PER) of the previous token's tag
    input_file: TextIO = open(input_path, "r", encoding="utf-8")
    output_file: TextIO = open(output_path, "w+", encoding="utf-8")
    lines: List[str] = input_file.readlines(block_size)
    line_no: int = 0
    while lines:
        text_2_write: List[str] = []
        for line in lines:
            if line == "\n":
                # Sentence boundary: reset the running tag state and echo the line.
                prev_tag_b = "O"
                prev_tag_t = ""
                text_2_write.append(line)
            else:
                try:
                    word, tag = line.rstrip().split(" ")
                except ValueError:
                    input_file.close()
                    output_file.close()
                    raise ValueError(
                        f"Error in line {line_no}, unable to split the text in 2 fields. Text: {line}"
                    )
                if tag == "O":
                    prev_tag_b = "O"
                    prev_tag_t = ""
                    text_2_write.append(line)
                else:
                    try:
                        b, t = tag.split("-")
                    except ValueError:
                        input_file.close()
                        output_file.close()
                        raise ValueError(
                            f"Error in line {line_no}, unable to split the tag in 2 fields. Text: {line} Tag: {tag}"
                        )
                    # In IOB, B- is only used to separate two adjacent entities of
                    # the same type; every other entity token becomes I-.
                    if (b == "B" or b == "U") and prev_tag_b != "O" and prev_tag_t == t:
                        text_2_write.append(f"{word} B-{t}\n")
                    else:
                        text_2_write.append(f"{word} I-{t}\n")
                    prev_tag_b = b
                    prev_tag_t = t
            line_no += 1
        print("".join(text_2_write), file=output_file, end="")
        lines = input_file.readlines(block_size)
    input_file.close()
    output_file.close()
def to_IOB2_encoding(input_path: str, output_path: str, block_size=65536) -> None:
    """Rewrite a tagged dataset from IOB or BILOU encoding to IOB2.

    Reads ``word TAG`` lines (one token per line, blank line between
    sentences) from *input_path* in chunks of roughly *block_size* bytes
    and writes the re-encoded lines to *output_path*.

    Raises:
        ValueError: if a line does not split into exactly two fields, or a
            non-O tag does not split into boundary and type on ``-``.
    """
    # From IOB or BILOU
    prev_tag_b: str = "O"  # boundary part (B/I/L/U/O) of the previous token's tag
    prev_tag_t: str = ""   # type part (e.g. PER) of the previous token's tag
    input_file: TextIO = open(input_path, "r", encoding="utf-8")
    output_file: TextIO = open(output_path, "w+", encoding="utf-8")
    lines: List[str] = input_file.readlines(block_size)
    line_no: int = 0
    while lines:
        text_2_write: List[str] = []
        for line in lines:
            if line == "\n":
                # Sentence boundary: reset the running tag state and echo the line.
                prev_tag_b = "O"
                prev_tag_t = ""
                text_2_write.append(line)
            else:
                try:
                    word, tag = line.rstrip().split(" ")
                except ValueError:
                    input_file.close()
                    output_file.close()
                    raise ValueError(
                        f"Error in line {line_no}, unable to split the text in 2 fields. Text: {line}"
                    )
                if tag == "O":
                    prev_tag_b = "O"
                    prev_tag_t = ""
                    text_2_write.append(line)
                else:
                    try:
                        b, t = tag.split("-")
                    except ValueError:
                        input_file.close()
                        output_file.close()
                        raise ValueError(
                            f"Error in line {line_no}, unable to split the tag in 2 fields. Text: {line} Tag: {tag}"
                        )
                    # In IOB2 every entity-initial token is B-: explicit B/U markers,
                    # or an entity token that starts a new span (after O, or after a
                    # different entity type).
                    if (b == "B" or b == "U") or (
                        (prev_tag_b == "O") or (prev_tag_t != "" and prev_tag_t != t)
                    ):
                        text_2_write.append(f"{word} B-{t}\n")
                    else:
                        text_2_write.append(f"{word} I-{t}\n")
                    prev_tag_b = b
                    prev_tag_t = t
            line_no += 1
        print("".join(text_2_write), file=output_file, end="")
        lines = input_file.readlines(block_size)
    input_file.close()
    output_file.close()
def to_BILOU_encoding(input_path: str, output_path: str, block_size=65536) -> None:
    """Rewrite a tagged dataset from IOB or IOB2 encoding to BILOU.

    BILOU needs one token of lookahead (a span's last token becomes L-, a
    single-token span becomes U-), so each entity token is buffered in
    ``prev_word``/``prev_word_tag_tmp`` and only emitted once the next
    token — or the end of the sentence/file — is known.

    Raises:
        ValueError: if a line does not split into exactly two fields, or a
            non-O tag does not split into boundary and type on ``-``.
    """
    # From IOB or IOB2
    prev_word: str = ""          # buffered token still awaiting its final BILOU tag
    prev_word_tag_tmp: str = ""  # provisional tag (B-/I- form) for prev_word
    input_file: TextIO = open(input_path, "r", encoding="utf-8")
    output_file: TextIO = open(output_path, "w+", encoding="utf-8")
    lines: List[str] = input_file.readlines(block_size)
    line_no: int = 0
    while lines:
        text_2_write: List[str] = []
        for line in lines:
            if line == "\n":
                # Sentence boundary: flush the buffered token, closing its span
                # (single-token span -> U-, multi-token span end -> L-).
                if prev_word != "":
                    try:
                        prev_b, prev_t = prev_word_tag_tmp.split("-")
                    except ValueError:
                        raise ValueError(
                            f"Error in line {line_no}, unable to split the tag in 2 fields. Tag: {prev_word_tag_tmp}"
                        )
                    if prev_b == "B":
                        text_2_write.append(f"{prev_word} U-{prev_t}\n")
                    else:
                        text_2_write.append(f"{prev_word} L-{prev_t}\n")
                text_2_write.append(line)
                prev_word: str = ""
                prev_word_tag_tmp: str = ""
            else:
                try:
                    word, tag = line.rstrip().split(" ")
                except ValueError:
                    input_file.close()
                    output_file.close()
                    raise ValueError(
                        f"Error in line {line_no}, unable to split the text in 2 fields. Text: {line}"
                    )
                if tag == "O":
                    # An O token ends any open span: flush the buffer, echo the line.
                    if prev_word != "":
                        try:
                            prev_b, prev_t = prev_word_tag_tmp.split("-")
                        except ValueError:
                            raise ValueError(
                                f"Error in line {line_no}, unable to split the tag in 2 fields. Tag: {prev_word_tag_tmp}"
                            )
                        if prev_b == "B":
                            text_2_write.append(f"{prev_word} U-{prev_t}\n")
                        else:
                            text_2_write.append(f"{prev_word} L-{prev_t}\n")
                    text_2_write.append(line)
                    prev_word: str = ""
                    prev_word_tag_tmp: str = ""
                else:
                    try:
                        b, t = tag.split("-")
                    except ValueError:
                        raise ValueError(
                            f"Error in line {line_no}, unable to split the tag in 2 fields. Text: {line} Tag: {tag}"
                        )
                    if prev_word == "":
                        if b == "U":
                            # Already a complete single-token span: emit as-is.
                            text_2_write.append(line)
                            prev_word = ""
                            prev_word_tag_tmp = ""
                        else:
                            # Start of a span: buffer as a provisional B- token.
                            prev_word = word
                            prev_word_tag_tmp = f"B-{t}"
                    else:
                        try:
                            prev_b, prev_t = prev_word_tag_tmp.split("-")
                        except ValueError:
                            raise ValueError(
                                f"Error in line {line_no}, unable to split the tag in 2 fields. Tag: {prev_word_tag_tmp}"
                            )
                        if b == "U":
                            # Buffered span ends before this single-token span.
                            if prev_b == "B":
                                text_2_write.append(f"{prev_word} U-{prev_t}\n")
                            else:
                                text_2_write.append(f"{prev_word} L-{prev_t}\n")
                            text_2_write.append(line)
                            prev_word = ""
                            prev_word_tag_tmp = ""
                        elif b == "B":
                            # New span starts: close the buffered one, buffer this token.
                            if prev_b == "B":
                                text_2_write.append(f"{prev_word} U-{prev_t}\n")
                            else:
                                text_2_write.append(f"{prev_word} L-{prev_t}\n")
                            prev_word = word
                            prev_word_tag_tmp = f"B-{t}"
                        else:
                            if prev_t != t:
                                # Type change implies a new span even without a B tag.
                                if prev_b == "B":
                                    text_2_write.append(f"{prev_word} U-{prev_t}\n")
                                else:
                                    text_2_write.append(f"{prev_word} L-{prev_t}\n")
                                prev_word = word
                                prev_word_tag_tmp = f"B-{t}"
                            else:
                                # Same span continues: buffered token is not the last one.
                                if prev_b == "B":
                                    text_2_write.append(f"{prev_word} B-{prev_t}\n")
                                else:
                                    text_2_write.append(f"{prev_word} I-{prev_t}\n")
                                prev_word = word
                                prev_word_tag_tmp = f"I-{t}"
            line_no += 1
        print("".join(text_2_write), file=output_file, end="")
        lines = input_file.readlines(block_size)
    # End of file: flush any still-buffered token.
    if prev_word != "":
        try:
            prev_b, prev_t = prev_word_tag_tmp.split("-")
        except ValueError:
            raise ValueError(
                f"Error in line {line_no}, unable to split the tag in 2 fields. Tag: {prev_word_tag_tmp}"
            )
        if prev_b == "B":
            print(f"{prev_word} U-{prev_t}\n", file=output_file, end="")
        else:
            print(f"{prev_word} L-{prev_t}\n", file=output_file, end="")
    input_file.close()
    output_file.close()
def rewrite_only_spans(dataset_path: str, block_size=65536) -> None:
    """Strip entity types from a tagged dataset, in place.

    Rewrites every ``word TAG`` line of *dataset_path* so that only the span
    marker is kept: ``O`` stays ``O`` and e.g. ``B-PER`` becomes ``B``.
    Blank separator lines are preserved.  The result is written to a
    ``.tmp`` sibling and then moved over the original file.

    Raises:
        ValueError: if a line does not split into word and tag, or a non-O
            tag does not split into marker and type on ``-``.
    """
    src = open(dataset_path, "r", encoding="utf-8")
    dst = open(f"{dataset_path}.tmp", "w+", encoding="utf-8")
    row_no = 0
    chunk = src.readlines(block_size)
    while chunk:
        out = []
        for raw in chunk:
            row_no += 1
            row = raw.rstrip()
            if not row:
                out.append("\n")
                continue
            try:
                word, tag = row.split(" ")
            except ValueError:
                src.close()
                dst.close()
                raise ValueError(
                    f"Error in line {row_no}. Unable to split in word and tag. Line: {row}"
                )
            if tag == "O":
                marker = "O"
            else:
                try:
                    marker, _entity_type = tag.split("-")
                except ValueError:
                    src.close()
                    dst.close()
                    raise ValueError(
                        f"Error in line {row_no}. Unable to tag. Tag: {tag}. Line: {row}"
                    )
            out.append(f"{word} {marker}\n")
        print("".join(out), file=dst, end="")
        chunk = src.readlines(block_size)
    src.close()
    dst.close()
    os.remove(dataset_path)
    os.rename(f"{dataset_path}.tmp", dataset_path)
def rewrite_dataset(dataset_path: str, encoding: str, output_path=None) -> None:
    """Convert a tagged dataset to the given tag *encoding*.

    Args:
        dataset_path: path of the IOB/IOB2/BILOU dataset to convert.
        encoding: target encoding, one of ``"IOB"``, ``"IOB2"`` or
            ``"BILOU"``; ``None`` leaves the dataset untouched.
        output_path: where to write the converted dataset (parent
            directories are created as needed); ``None`` rewrites
            *dataset_path* in place.

    Raises:
        NotImplementedError: if *encoding* is not a supported value.
    """
    # Bug fix: the original called os.path.dirname(output_path) unconditionally,
    # which raised TypeError when output_path was None (its documented default)
    # and FileNotFoundError via makedirs("") when output_path had no directory part.
    if output_path is not None:
        output_dir = os.path.dirname(output_path)
        if output_dir and not os.path.exists(output_dir):
            os.makedirs(output_dir)
    if encoding is not None:
        if encoding == "IOB":
            to_IOB_encoding(dataset_path, f"{dataset_path}.tmp")
        elif encoding == "IOB2":
            to_IOB2_encoding(dataset_path, f"{dataset_path}.tmp")
        elif encoding == "BILOU":
            to_BILOU_encoding(dataset_path, f"{dataset_path}.tmp")
        else:
            raise NotImplementedError(
                f"Encoding {encoding} not supported. Supported encodings [IOB,IOB2,BILOU]"
            )
        if output_path is None:
            # In-place rewrite: replace the original with the converted file.
            os.remove(dataset_path)
            os.rename(f"{dataset_path}.tmp", dataset_path)
        else:
            os.rename(f"{dataset_path}.tmp", output_path)
if __name__ == "__main__":
    # CLI entry point: re-encode a tagged dataset between IOB/IOB2/BILOU.
    # NOTE(review): the description mentions FastAlign but this script only
    # converts tag encodings — looks copy-pasted; confirm intended wording.
    parser = argparse.ArgumentParser(description="Generate Alignments FastAlign")
    parser.add_argument(
        "--dataset_path",
        type=str,
        required=True,
        help="Path to the source dataset in IOB/IOB2/BILOU format",
    )
    parser.add_argument(
        "--encoding",
        type=str,
        required=True,
        choices=["IOB", "IOB2", "BILOU"],
        help="Encoding to which the dataset will be converted",
    )
    parser.add_argument(
        "--output_path",
        type=str,
        required=True,
        help="Output path",
    )
    args = parser.parse_args()
    rewrite_dataset(
        dataset_path=args.dataset_path,
        encoding=args.encoding,
        output_path=args.output_path,
    )
| 36.301939
| 121
| 0.446623
| 1,448
| 13,105
| 3.790746
| 0.076657
| 0.064128
| 0.054655
| 0.069958
| 0.81217
| 0.792858
| 0.774276
| 0.750228
| 0.744762
| 0.721261
| 0
| 0.011625
| 0.448607
| 13,105
| 360
| 122
| 36.402778
| 0.747993
| 0.004044
| 0
| 0.748344
| 0
| 0.009934
| 0.144543
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016556
| false
| 0
| 0.009934
| 0
| 0.02649
| 0.033113
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c94815c6ce93c9c7ed9d9c3c414b3d21144ebce4
| 9,174
|
py
|
Python
|
apis_core/apis_relations/migrations/0001_initial.py
|
acdh-oeaw/apis-core
|
f7ece05eec46c820321fd28d3e947653dcb98ae7
|
[
"MIT"
] | 11
|
2018-07-11T18:11:40.000Z
|
2022-03-25T11:07:12.000Z
|
apis_core/apis_relations/migrations/0001_initial.py
|
acdh-oeaw/apis-core
|
f7ece05eec46c820321fd28d3e947653dcb98ae7
|
[
"MIT"
] | 309
|
2018-06-11T08:38:50.000Z
|
2022-03-31T13:45:22.000Z
|
apis_core/apis_relations/migrations/0001_initial.py
|
acdh-oeaw/apis-core
|
f7ece05eec46c820321fd28d3e947653dcb98ae7
|
[
"MIT"
] | 5
|
2017-08-21T10:37:07.000Z
|
2021-09-27T19:08:47.000Z
|
# Generated by Django 2.1.12 on 2020-01-21 12:27
import django.db.models.manager
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial apis_relations schema: one relation model per entity pair.

    Every relation model is a multi-table-inheritance child of
    apis_metainfo.TempEntityClass with the same options and managers;
    WorkWork additionally carries the two related_work foreign keys.
    """

    initial = True

    dependencies = [
        ('apis_entities', '0001_initial'),
        ('apis_metainfo', '0001_initial'),
    ]

    # Helper executed once at class-creation time; removed from the class
    # namespace below so it does not become a method.
    def _relation_model(name, extra_fields=()):
        """Build the CreateModel operation shared by all relation models."""
        return migrations.CreateModel(
            name=name,
            fields=[
                ('tempentityclass_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='apis_metainfo.TempEntityClass')),
            ] + list(extra_fields),
            options={
                'abstract': False,
            },
            bases=('apis_metainfo.tempentityclass',),
            managers=[
                ('annotation_links', django.db.models.manager.Manager()),
            ],
        )

    _plain_models = (
        'EventEvent',
        'EventWork',
        'InstitutionEvent',
        'InstitutionInstitution',
        'InstitutionPlace',
        'InstitutionWork',
        'PersonEvent',
        'PersonInstitution',
        'PersonPerson',
        'PersonPlace',
        'PersonWork',
        'PlaceEvent',
        'PlacePlace',
        'PlaceWork',
    )

    # map() (not a comprehension) so _relation_model resolves in class scope.
    operations = list(map(_relation_model, _plain_models)) + [
        _relation_model('WorkWork', extra_fields=[
            ('related_workA', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_workB', to='apis_entities.Work')),
            ('related_workB', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='related_workA', to='apis_entities.Work')),
        ]),
    ]

    del _relation_model, _plain_models
| 42.669767
| 215
| 0.565075
| 775
| 9,174
| 6.516129
| 0.094194
| 0.053861
| 0.091485
| 0.060594
| 0.903168
| 0.903168
| 0.903168
| 0.903168
| 0.903168
| 0.903168
| 0
| 0.003785
| 0.308808
| 9,174
| 214
| 216
| 42.869159
| 0.792619
| 0.005014
| 0
| 0.797101
| 1
| 0
| 0.201512
| 0.097743
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009662
| 0
| 0.028986
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c95e1ff2287d7af477a648c6eb5799a52fb347a3
| 10,563
|
py
|
Python
|
Algorithm.Python/stubs/QuantConnect/Indicators/__CandlestickPatterns_3.py
|
gaoxiaojun/Lean
|
9dca43bccb720d0df91e4bfc1d363b71e3a36cb5
|
[
"Apache-2.0"
] | 2
|
2020-12-08T11:27:20.000Z
|
2021-04-06T13:21:15.000Z
|
Algorithm.Python/stubs/QuantConnect/Indicators/__CandlestickPatterns_3.py
|
gaoxiaojun/Lean
|
9dca43bccb720d0df91e4bfc1d363b71e3a36cb5
|
[
"Apache-2.0"
] | null | null | null |
Algorithm.Python/stubs/QuantConnect/Indicators/__CandlestickPatterns_3.py
|
gaoxiaojun/Lean
|
9dca43bccb720d0df91e4bfc1d363b71e3a36cb5
|
[
"Apache-2.0"
] | 1
|
2020-12-08T11:27:21.000Z
|
2020-12-08T11:27:21.000Z
|
from .__CandlestickPatterns_4 import *
import typing
import QuantConnect.Indicators.CandlestickPatterns
import datetime
class MorningDojiStar(QuantConnect.Indicators.CandlestickPatterns.CandlestickPattern, System.IComparable, QuantConnect.Indicators.IIndicator[IBaseDataBar], QuantConnect.Indicators.IIndicator, System.IComparable[IIndicator[IBaseDataBar]]):
    """
    Morning Doji Star candlestick pattern
    MorningDojiStar(name: str, penetration: Decimal)
    MorningDojiStar(penetration: Decimal)
    MorningDojiStar()
    """
    # Auto-generated .NET interop stub: bodies are placeholders; the real
    # implementation lives in the underlying QuantConnect indicator.
    def Reset(self) -> None:
        pass
    # Overloads describe the constructor signatures for type checkers only;
    # the final __init__(*args) is the actual dispatch entry point.
    @typing.overload
    def __init__(self, name: str, penetration: float) -> QuantConnect.Indicators.CandlestickPatterns.MorningDojiStar:
        pass
    @typing.overload
    def __init__(self, penetration: float) -> QuantConnect.Indicators.CandlestickPatterns.MorningDojiStar:
        pass
    @typing.overload
    def __init__(self) -> QuantConnect.Indicators.CandlestickPatterns.MorningDojiStar:
        pass
    def __init__(self, *args) -> QuantConnect.Indicators.CandlestickPatterns.MorningDojiStar:
        pass
    # True once the indicator has consumed enough bars to produce a value.
    IsReady: bool
class MorningStar(QuantConnect.Indicators.CandlestickPatterns.CandlestickPattern, System.IComparable, QuantConnect.Indicators.IIndicator[IBaseDataBar], QuantConnect.Indicators.IIndicator, System.IComparable[IIndicator[IBaseDataBar]]):
    """
    Morning Star candlestick pattern
    MorningStar(name: str, penetration: Decimal)
    MorningStar(penetration: Decimal)
    MorningStar()
    """
    # Auto-generated .NET interop stub: bodies are placeholders; the real
    # implementation lives in the underlying QuantConnect indicator.
    def Reset(self) -> None:
        pass
    # Overloads are for type checkers only; __init__(*args) dispatches.
    @typing.overload
    def __init__(self, name: str, penetration: float) -> QuantConnect.Indicators.CandlestickPatterns.MorningStar:
        pass
    @typing.overload
    def __init__(self, penetration: float) -> QuantConnect.Indicators.CandlestickPatterns.MorningStar:
        pass
    @typing.overload
    def __init__(self) -> QuantConnect.Indicators.CandlestickPatterns.MorningStar:
        pass
    def __init__(self, *args) -> QuantConnect.Indicators.CandlestickPatterns.MorningStar:
        pass
    # True once the indicator has consumed enough bars to produce a value.
    IsReady: bool
class OnNeck(QuantConnect.Indicators.CandlestickPatterns.CandlestickPattern, System.IComparable, QuantConnect.Indicators.IIndicator[IBaseDataBar], QuantConnect.Indicators.IIndicator, System.IComparable[IIndicator[IBaseDataBar]]):
    """
    On-Neck candlestick pattern indicator
    OnNeck(name: str)
    OnNeck()
    """
    # Auto-generated .NET interop stub: bodies are placeholders; the real
    # implementation lives in the underlying QuantConnect indicator.
    def Reset(self) -> None:
        pass
    # Overloads are for type checkers only; __init__(*args) dispatches.
    @typing.overload
    def __init__(self, name: str) -> QuantConnect.Indicators.CandlestickPatterns.OnNeck:
        pass
    @typing.overload
    def __init__(self) -> QuantConnect.Indicators.CandlestickPatterns.OnNeck:
        pass
    def __init__(self, *args) -> QuantConnect.Indicators.CandlestickPatterns.OnNeck:
        pass
    # True once the indicator has consumed enough bars to produce a value.
    IsReady: bool
class Piercing(QuantConnect.Indicators.CandlestickPatterns.CandlestickPattern, System.IComparable, QuantConnect.Indicators.IIndicator[IBaseDataBar], QuantConnect.Indicators.IIndicator, System.IComparable[IIndicator[IBaseDataBar]]):
    """
    Piercing candlestick pattern
    Piercing(name: str)
    Piercing()
    """
    # Auto-generated .NET interop stub: bodies are placeholders; the real
    # implementation lives in the underlying QuantConnect indicator.
    def Reset(self) -> None:
        pass
    # Overloads are for type checkers only; __init__(*args) dispatches.
    @typing.overload
    def __init__(self, name: str) -> QuantConnect.Indicators.CandlestickPatterns.Piercing:
        pass
    @typing.overload
    def __init__(self) -> QuantConnect.Indicators.CandlestickPatterns.Piercing:
        pass
    def __init__(self, *args) -> QuantConnect.Indicators.CandlestickPatterns.Piercing:
        pass
    # True once the indicator has consumed enough bars to produce a value.
    IsReady: bool
class RickshawMan(QuantConnect.Indicators.CandlestickPatterns.CandlestickPattern, System.IComparable, QuantConnect.Indicators.IIndicator[IBaseDataBar], QuantConnect.Indicators.IIndicator, System.IComparable[IIndicator[IBaseDataBar]]):
    """
    Rickshaw Man candlestick pattern
    RickshawMan(name: str)
    RickshawMan()
    """
    # Auto-generated .NET interop stub: bodies are placeholders; the real
    # implementation lives in the underlying QuantConnect indicator.
    def Reset(self) -> None:
        pass
    # Overloads are for type checkers only; __init__(*args) dispatches.
    @typing.overload
    def __init__(self, name: str) -> QuantConnect.Indicators.CandlestickPatterns.RickshawMan:
        pass
    @typing.overload
    def __init__(self) -> QuantConnect.Indicators.CandlestickPatterns.RickshawMan:
        pass
    def __init__(self, *args) -> QuantConnect.Indicators.CandlestickPatterns.RickshawMan:
        pass
    # True once the indicator has consumed enough bars to produce a value.
    IsReady: bool
class RiseFallThreeMethods(QuantConnect.Indicators.CandlestickPatterns.CandlestickPattern, System.IComparable, QuantConnect.Indicators.IIndicator[IBaseDataBar], QuantConnect.Indicators.IIndicator, System.IComparable[IIndicator[IBaseDataBar]]):
    """
    Rising/Falling Three Methods candlestick pattern
    RiseFallThreeMethods(name: str)
    RiseFallThreeMethods()
    """
    # Auto-generated .NET interop stub: bodies are placeholders; the real
    # implementation lives in the underlying QuantConnect indicator.
    def Reset(self) -> None:
        pass
    # Overloads are for type checkers only; __init__(*args) dispatches.
    @typing.overload
    def __init__(self, name: str) -> QuantConnect.Indicators.CandlestickPatterns.RiseFallThreeMethods:
        pass
    @typing.overload
    def __init__(self) -> QuantConnect.Indicators.CandlestickPatterns.RiseFallThreeMethods:
        pass
    def __init__(self, *args) -> QuantConnect.Indicators.CandlestickPatterns.RiseFallThreeMethods:
        pass
    # True once the indicator has consumed enough bars to produce a value.
    IsReady: bool
class SeparatingLines(QuantConnect.Indicators.CandlestickPatterns.CandlestickPattern, System.IComparable, QuantConnect.Indicators.IIndicator[IBaseDataBar], QuantConnect.Indicators.IIndicator, System.IComparable[IIndicator[IBaseDataBar]]):
    """
    Separating Lines candlestick pattern indicator
    SeparatingLines(name: str)
    SeparatingLines()
    """
    # Auto-generated .NET interop stub: bodies are placeholders; the real
    # implementation lives in the underlying QuantConnect indicator.
    def Reset(self) -> None:
        pass
    # Overloads are for type checkers only; __init__(*args) dispatches.
    @typing.overload
    def __init__(self, name: str) -> QuantConnect.Indicators.CandlestickPatterns.SeparatingLines:
        pass
    @typing.overload
    def __init__(self) -> QuantConnect.Indicators.CandlestickPatterns.SeparatingLines:
        pass
    def __init__(self, *args) -> QuantConnect.Indicators.CandlestickPatterns.SeparatingLines:
        pass
    # True once the indicator has consumed enough bars to produce a value.
    IsReady: bool
class ShootingStar(QuantConnect.Indicators.CandlestickPatterns.CandlestickPattern, System.IComparable, QuantConnect.Indicators.IIndicator[IBaseDataBar], QuantConnect.Indicators.IIndicator, System.IComparable[IIndicator[IBaseDataBar]]):
    """
    Shooting Star candlestick pattern
    ShootingStar(name: str)
    ShootingStar()
    """
    # Auto-generated .NET interop stub: bodies are placeholders; the real
    # implementation lives in the underlying QuantConnect indicator.
    def Reset(self) -> None:
        pass
    # Overloads are for type checkers only; __init__(*args) dispatches.
    @typing.overload
    def __init__(self, name: str) -> QuantConnect.Indicators.CandlestickPatterns.ShootingStar:
        pass
    @typing.overload
    def __init__(self) -> QuantConnect.Indicators.CandlestickPatterns.ShootingStar:
        pass
    def __init__(self, *args) -> QuantConnect.Indicators.CandlestickPatterns.ShootingStar:
        pass
    # True once the indicator has consumed enough bars to produce a value.
    IsReady: bool
class ShortLineCandle(QuantConnect.Indicators.CandlestickPatterns.CandlestickPattern, System.IComparable, QuantConnect.Indicators.IIndicator[IBaseDataBar], QuantConnect.Indicators.IIndicator, System.IComparable[IIndicator[IBaseDataBar]]):
    """
    Short Line Candle candlestick pattern indicator
    ShortLineCandle(name: str)
    ShortLineCandle()
    """
    # Auto-generated .NET interop stub: bodies are placeholders; the real
    # implementation lives in the underlying QuantConnect indicator.
    def Reset(self) -> None:
        pass
    # Overloads are for type checkers only; __init__(*args) dispatches.
    @typing.overload
    def __init__(self, name: str) -> QuantConnect.Indicators.CandlestickPatterns.ShortLineCandle:
        pass
    @typing.overload
    def __init__(self) -> QuantConnect.Indicators.CandlestickPatterns.ShortLineCandle:
        pass
    def __init__(self, *args) -> QuantConnect.Indicators.CandlestickPatterns.ShortLineCandle:
        pass
    # True once the indicator has consumed enough bars to produce a value.
    IsReady: bool
class SpinningTop(QuantConnect.Indicators.CandlestickPatterns.CandlestickPattern, System.IComparable, QuantConnect.Indicators.IIndicator[IBaseDataBar], QuantConnect.Indicators.IIndicator, System.IComparable[IIndicator[IBaseDataBar]]):
    """
    Spinning Top candlestick pattern indicator
    SpinningTop(name: str)
    SpinningTop()
    """
    # Auto-generated .NET interop stub: bodies are placeholders; the real
    # implementation lives in the underlying QuantConnect indicator.
    def Reset(self) -> None:
        pass
    # Overloads are for type checkers only; __init__(*args) dispatches.
    @typing.overload
    def __init__(self, name: str) -> QuantConnect.Indicators.CandlestickPatterns.SpinningTop:
        pass
    @typing.overload
    def __init__(self) -> QuantConnect.Indicators.CandlestickPatterns.SpinningTop:
        pass
    def __init__(self, *args) -> QuantConnect.Indicators.CandlestickPatterns.SpinningTop:
        pass
    # True once the indicator has consumed enough bars to produce a value.
    IsReady: bool
class StalledPattern(QuantConnect.Indicators.CandlestickPatterns.CandlestickPattern, System.IComparable, QuantConnect.Indicators.IIndicator[IBaseDataBar], QuantConnect.Indicators.IIndicator, System.IComparable[IIndicator[IBaseDataBar]]):
    """
    Stalled Pattern candlestick pattern
    StalledPattern(name: str)
    StalledPattern()
    """
    # Auto-generated .NET interop stub: bodies are placeholders; the real
    # implementation lives in the underlying QuantConnect indicator.
    def Reset(self) -> None:
        pass
    # Overloads are for type checkers only; __init__(*args) dispatches.
    @typing.overload
    def __init__(self, name: str) -> QuantConnect.Indicators.CandlestickPatterns.StalledPattern:
        pass
    @typing.overload
    def __init__(self) -> QuantConnect.Indicators.CandlestickPatterns.StalledPattern:
        pass
    def __init__(self, *args) -> QuantConnect.Indicators.CandlestickPatterns.StalledPattern:
        pass
    # True once the indicator has consumed enough bars to produce a value.
    IsReady: bool
class StickSandwich(QuantConnect.Indicators.CandlestickPatterns.CandlestickPattern, System.IComparable, QuantConnect.Indicators.IIndicator[IBaseDataBar], QuantConnect.Indicators.IIndicator, System.IComparable[IIndicator[IBaseDataBar]]):
    """
    Stick Sandwich candlestick pattern indicator
    StickSandwich(name: str)
    StickSandwich()
    """
    # Auto-generated .NET interop stub: bodies are placeholders; the real
    # implementation lives in the underlying QuantConnect indicator.
    def Reset(self) -> None:
        pass
    # Overloads are for type checkers only; __init__(*args) dispatches.
    @typing.overload
    def __init__(self, name: str) -> QuantConnect.Indicators.CandlestickPatterns.StickSandwich:
        pass
    @typing.overload
    def __init__(self) -> QuantConnect.Indicators.CandlestickPatterns.StickSandwich:
        pass
    def __init__(self, *args) -> QuantConnect.Indicators.CandlestickPatterns.StickSandwich:
        pass
    # True once the indicator has consumed enough bars to produce a value.
    IsReady: bool
class Takuri(QuantConnect.Indicators.CandlestickPatterns.CandlestickPattern, System.IComparable, QuantConnect.Indicators.IIndicator[IBaseDataBar], QuantConnect.Indicators.IIndicator, System.IComparable[IIndicator[IBaseDataBar]]):
    """
    Takuri (Dragonfly Doji with very long lower shadow) candlestick pattern indicator
    Takuri(name: str)
    Takuri()
    """
    # Auto-generated .NET interop stub: bodies are placeholders; the real
    # implementation lives in the underlying QuantConnect indicator.
    def Reset(self) -> None:
        pass
    # Overloads are for type checkers only; __init__(*args) dispatches.
    @typing.overload
    def __init__(self, name: str) -> QuantConnect.Indicators.CandlestickPatterns.Takuri:
        pass
    @typing.overload
    def __init__(self) -> QuantConnect.Indicators.CandlestickPatterns.Takuri:
        pass
    def __init__(self, *args) -> QuantConnect.Indicators.CandlestickPatterns.Takuri:
        pass
    # True once the indicator has consumed enough bars to produce a value.
    IsReady: bool
| 31.067647
| 243
| 0.734545
| 910
| 10,563
| 8.342857
| 0.08022
| 0.234721
| 0.297023
| 0.07745
| 0.809932
| 0.744468
| 0.744468
| 0.744468
| 0.641728
| 0.529505
| 0
| 0.000115
| 0.17372
| 10,563
| 339
| 244
| 31.159292
| 0.86973
| 0.112847
| 0
| 0.650602
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.325301
| false
| 0.325301
| 0.024096
| 0
| 0.506024
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 9
|
a385cabaecf3d52a8332533d210bd8c6752248de
| 57,004
|
py
|
Python
|
embyapi/api/user_service_api.py
|
stanionascu/python-embyapi
|
a3f7aa49aea4052277cc43605c0d89bc6ff21913
|
[
"BSD-3-Clause"
] | null | null | null |
embyapi/api/user_service_api.py
|
stanionascu/python-embyapi
|
a3f7aa49aea4052277cc43605c0d89bc6ff21913
|
[
"BSD-3-Clause"
] | null | null | null |
embyapi/api/user_service_api.py
|
stanionascu/python-embyapi
|
a3f7aa49aea4052277cc43605c0d89bc6ff21913
|
[
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
"""
Emby Server API
Explore the Emby Server API # noqa: E501
OpenAPI spec version: 4.1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from embyapi.api_client import ApiClient
class UserServiceApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def delete_users_by_id(self, id, **kwargs):  # noqa: E501
    """Deletes a user  # noqa: E501

    Requires authentication as administrator. Synchronous by default; pass
    async_req=True to receive the request thread instead of the result.

    >>> thread = api.delete_users_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :return: None, or the request thread when called asynchronously
    """
    # Callers of this convenience wrapper want only the payload, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Identical call either way: async yields the request thread, sync the data.
    return self.delete_users_by_id_with_http_info(id, **kwargs)  # noqa: E501
def delete_users_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Deletes a user  # noqa: E501

    Requires authentication as administrator  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_users_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the framework-level control arguments.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshots {self, id, kwargs}; kwargs is then flattened into
    # it, rejecting any key this endpoint does not recognize.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_users_by_id" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `delete_users_by_id`")  # noqa: E501

    collection_formats = {}

    # 'Id' is substituted into the '/Users/{Id}' path template below.
    path_params = {}
    if 'id' in params:
        path_params['Id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # Authentication setting
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_users(self, **kwargs):  # noqa: E501
    """Gets a list of users  # noqa: E501

    Requires authentication as user. Synchronous by default; pass
    async_req=True to receive the request thread instead of the result.

    >>> thread = api.get_users(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool is_hidden: Optional filter by IsHidden=true or false
    :param bool is_disabled: Optional filter by IsDisabled=true or false
    :return: list[UserDto], or the request thread when called asynchronously
    """
    # Strip the (data, status, headers) envelope; callers want the payload.
    kwargs['_return_http_data_only'] = True
    # Identical call either way: async yields the request thread, sync the data.
    return self.get_users_with_http_info(**kwargs)  # noqa: E501
def get_users_with_http_info(self, **kwargs):  # noqa: E501
    """Gets a list of users  # noqa: E501

    Requires authentication as user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_users_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool is_hidden: Optional filter by IsHidden=true or false
    :param bool is_disabled: Optional filter by IsDisabled=true or false
    :return: list[UserDto]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the framework-level control arguments.
    all_params = ['is_hidden', 'is_disabled']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Flatten kwargs into the locals() snapshot, rejecting unknown keys.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_users" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # Only forward the optional filters the caller actually supplied.
    query_params = []
    if 'is_hidden' in params:
        query_params.append(('IsHidden', params['is_hidden']))  # noqa: E501
    if 'is_disabled' in params:
        query_params.append(('IsDisabled', params['is_disabled']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

    return self.api_client.call_api(
        '/Users', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[UserDto]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_users_by_id(self, id, **kwargs):  # noqa: E501
    """Gets a user by Id  # noqa: E501

    Requires authentication as user. Synchronous by default; pass
    async_req=True to receive the request thread instead of the result.

    >>> thread = api.get_users_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :return: UserDto, or the request thread when called asynchronously
    """
    # Strip the (data, status, headers) envelope; callers want the payload.
    kwargs['_return_http_data_only'] = True
    # Identical call either way: async yields the request thread, sync the data.
    return self.get_users_by_id_with_http_info(id, **kwargs)  # noqa: E501
def get_users_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Gets a user by Id  # noqa: E501

    Requires authentication as user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_users_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :return: UserDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the framework-level control arguments.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Flatten kwargs into the locals() snapshot, rejecting unknown keys.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_users_by_id" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `get_users_by_id`")  # noqa: E501

    collection_formats = {}

    # 'Id' is substituted into the '/Users/{Id}' path template below.
    path_params = {}
    if 'id' in params:
        path_params['Id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UserDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_users_public(self, **kwargs):  # noqa: E501
    """Gets a list of publicly visible users for display on a login screen.  # noqa: E501

    No authentication required. Synchronous by default; pass
    async_req=True to receive the request thread instead of the result.

    >>> thread = api.get_users_public(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[UserDto], or the request thread when called asynchronously
    """
    # Strip the (data, status, headers) envelope; callers want the payload.
    kwargs['_return_http_data_only'] = True
    # Identical call either way: async yields the request thread, sync the data.
    return self.get_users_public_with_http_info(**kwargs)  # noqa: E501
def get_users_public_with_http_info(self, **kwargs):  # noqa: E501
    """Gets a list of publicly visible users for display on a login screen.  # noqa: E501

    No authentication required  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_users_public_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[UserDto]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint takes no query parameters of its own, only the
    # framework-level control arguments.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Flatten kwargs into the locals() snapshot, rejecting unknown keys.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_users_public" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    # Empty: this endpoint is public.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Users/Public', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[UserDto]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_authenticatebyname(self, body, x_emby_authorization, **kwargs):  # noqa: E501
    """Authenticates a user  # noqa: E501

    Authenticate a user by name and password. A 200 status code indicates
    success, while anything in the 400 or 500 range indicates failure.
    No authentication required. Synchronous by default; pass
    async_req=True to receive the request thread instead of the result.

    >>> thread = api.post_users_authenticatebyname(body, x_emby_authorization, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param AuthenticateUserByName body: AuthenticateUserByName (required)
    :param str x_emby_authorization: The authorization header, named either
        'Authorization' or 'X-Emby-Authorization', of the schema:
        Emby UserId=\"(guid)\", Client=\"(string)\", Device=\"(string)\",
        DeviceId=\"(string)\", Version=\"string\", Token=\"(string)\" (required)
    :return: AuthenticationAuthenticationResult, or the request thread when
             called asynchronously
    """
    # Strip the (data, status, headers) envelope; callers want the payload.
    kwargs['_return_http_data_only'] = True
    # Identical call either way: async yields the request thread, sync the data.
    return self.post_users_authenticatebyname_with_http_info(body, x_emby_authorization, **kwargs)  # noqa: E501
def post_users_authenticatebyname_with_http_info(self, body, x_emby_authorization, **kwargs):  # noqa: E501
    """Authenticates a user  # noqa: E501

    Authenticate a user by name and password. A 200 status code indicates success, while anything in the 400 or 500 range indicates failure --- No authentication required  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.post_users_authenticatebyname_with_http_info(body, x_emby_authorization, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param AuthenticateUserByName body: AuthenticateUserByName (required)
    :param str x_emby_authorization: The authorization header can be either named 'Authorization' or 'X-Emby-Authorization'. It must be of the following schema: Emby UserId=\"(guid)\", Client=\"(string)\", Device=\"(string)\", DeviceId=\"(string)\", Version=\"string\", Token=\"(string)\" Please consult the documentation for further details. (required)
    :return: AuthenticationAuthenticationResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the framework-level control arguments.
    all_params = ['body', 'x_emby_authorization']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Flatten kwargs into the locals() snapshot, rejecting unknown keys.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_authenticatebyname" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_authenticatebyname`")  # noqa: E501
    # verify the required parameter 'x_emby_authorization' is set
    if ('x_emby_authorization' not in params or
            params['x_emby_authorization'] is None):
        raise ValueError("Missing the required parameter `x_emby_authorization` when calling `post_users_authenticatebyname`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    # Authentication material travels in the X-Emby-Authorization header.
    header_params = {}
    if 'x_emby_authorization' in params:
        header_params['X-Emby-Authorization'] = params['x_emby_authorization']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    # Empty: this is the login endpoint itself.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Users/AuthenticateByName', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AuthenticationAuthenticationResult',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_by_id(self, body, id, **kwargs):  # noqa: E501
    """Updates a user  # noqa: E501

    Requires authentication as user. Synchronous by default; pass
    async_req=True to receive the request thread instead of the result.

    >>> thread = api.post_users_by_id(body, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UserDto body: UserDto: (required)
    :param str id: (required)
    :return: None, or the request thread when called asynchronously
    """
    # Strip the (data, status, headers) envelope; callers want the payload.
    kwargs['_return_http_data_only'] = True
    # Identical call either way: async yields the request thread, sync the data.
    return self.post_users_by_id_with_http_info(body, id, **kwargs)  # noqa: E501
def post_users_by_id_with_http_info(self, body, id, **kwargs):  # noqa: E501
    """Updates a user  # noqa: E501

    Requires authentication as user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.post_users_by_id_with_http_info(body, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UserDto body: UserDto: (required)
    :param str id: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the framework-level control arguments.
    all_params = ['body', 'id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Flatten kwargs into the locals() snapshot, rejecting unknown keys.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_by_id" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_by_id`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `post_users_by_id`")  # noqa: E501

    collection_formats = {}

    # 'Id' is substituted into the '/Users/{Id}' path template below.
    path_params = {}
    if 'id' in params:
        path_params['Id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_by_id_authenticate(self, body, id, **kwargs):  # noqa: E501
    """Authenticates a user  # noqa: E501

    No authentication required. Synchronous by default; pass
    async_req=True to receive the request thread instead of the result.

    >>> thread = api.post_users_by_id_authenticate(body, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param AuthenticateUser body: AuthenticateUser (required)
    :param str id: (required)
    :return: AuthenticationAuthenticationResult, or the request thread when
             called asynchronously
    """
    # Strip the (data, status, headers) envelope; callers want the payload.
    kwargs['_return_http_data_only'] = True
    # Identical call either way: async yields the request thread, sync the data.
    return self.post_users_by_id_authenticate_with_http_info(body, id, **kwargs)  # noqa: E501
def post_users_by_id_authenticate_with_http_info(self, body, id, **kwargs):  # noqa: E501
    """Authenticates a user  # noqa: E501

    No authentication required  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.post_users_by_id_authenticate_with_http_info(body, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param AuthenticateUser body: AuthenticateUser (required)
    :param str id: (required)
    :return: AuthenticationAuthenticationResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the framework-level control arguments.
    all_params = ['body', 'id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Flatten kwargs into the locals() snapshot, rejecting unknown keys.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_by_id_authenticate" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_by_id_authenticate`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `post_users_by_id_authenticate`")  # noqa: E501

    collection_formats = {}

    # 'Id' is substituted into the '/Users/{Id}/Authenticate' path template.
    path_params = {}
    if 'id' in params:
        path_params['Id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    # Empty: this is a login endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}/Authenticate', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AuthenticationAuthenticationResult',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_by_id_configuration(self, body, id, **kwargs):  # noqa: E501
    """Updates a user configuration  # noqa: E501

    Requires authentication as user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_by_id_configuration(body, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ConfigurationUserConfiguration body: UserConfiguration: (required)
    :param str id: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper always want just the payload
    # (or, in async mode, the request thread) rather than the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths forward to the *_with_http_info
    # variant; in async mode its return value is already the thread.
    return self.post_users_by_id_configuration_with_http_info(body, id, **kwargs)  # noqa: E501
def post_users_by_id_configuration_with_http_info(self, body, id, **kwargs):  # noqa: E501
    """Updates a user configuration  # noqa: E501

    Requires authentication as user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_by_id_configuration_with_http_info(body, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ConfigurationUserConfiguration body: UserConfiguration: (required)
    :param str id: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameters accepted by this endpoint, plus the generic client options.
    all_params = ['body', 'id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals (self, body, id, kwargs, all_params);
    # validated keyword arguments are merged into it below, so do not
    # introduce new locals above this line.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_by_id_configuration" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_by_id_configuration`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `post_users_by_id_configuration`")  # noqa: E501

    collection_formats = {}

    # The user id is interpolated into the URL path as {Id}.
    path_params = {}
    if 'id' in params:
        path_params['Id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The UserConfiguration object is sent as the JSON/XML request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}/Configuration', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_by_id_easypassword(self, body, id, **kwargs):  # noqa: E501
    """Updates a user's easy password  # noqa: E501

    Requires authentication as user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_by_id_easypassword(body, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UpdateUserEasyPassword body: UpdateUserEasyPassword (required)
    :param str id: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the transport layer for only the payload (or, in async mode,
    # the request thread), not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # One forwarding call covers both the sync and async paths.
    return self.post_users_by_id_easypassword_with_http_info(body, id, **kwargs)  # noqa: E501
def post_users_by_id_easypassword_with_http_info(self, body, id, **kwargs):  # noqa: E501
    """Updates a user's easy password  # noqa: E501

    Requires authentication as user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_by_id_easypassword_with_http_info(body, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UpdateUserEasyPassword body: UpdateUserEasyPassword (required)
    :param str id: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameters accepted by this endpoint, plus the generic client options.
    all_params = ['body', 'id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals; validated kwargs are merged in below,
    # so no new locals may be introduced above this line.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_by_id_easypassword" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_by_id_easypassword`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `post_users_by_id_easypassword`")  # noqa: E501

    collection_formats = {}

    # The user id is interpolated into the URL path as {Id}.
    path_params = {}
    if 'id' in params:
        path_params['Id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The UpdateUserEasyPassword object is sent as the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}/EasyPassword', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_by_id_password(self, body, id, **kwargs):  # noqa: E501
    """Updates a user's password  # noqa: E501

    Requires authentication as user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_by_id_password(body, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UpdateUserPassword body: UpdateUserPassword (required)
    :param str id: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request just the payload (or the async request thread) from the
    # lower-level *_with_http_info call.
    kwargs['_return_http_data_only'] = True
    return self.post_users_by_id_password_with_http_info(body, id, **kwargs)  # noqa: E501
def post_users_by_id_password_with_http_info(self, body, id, **kwargs):  # noqa: E501
    """Updates a user's password  # noqa: E501

    Requires authentication as user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_by_id_password_with_http_info(body, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UpdateUserPassword body: UpdateUserPassword (required)
    :param str id: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameters accepted by this endpoint, plus the generic client options.
    all_params = ['body', 'id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals; validated kwargs are merged in below,
    # so no new locals may be introduced above this line.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_by_id_password" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_by_id_password`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `post_users_by_id_password`")  # noqa: E501

    collection_formats = {}

    # The user id is interpolated into the URL path as {Id}.
    path_params = {}
    if 'id' in params:
        path_params['Id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The UpdateUserPassword object is sent as the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}/Password', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_by_id_policy(self, body, id, **kwargs):  # noqa: E501
    """Updates a user policy  # noqa: E501

    Requires authentication as administrator  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_by_id_policy(body, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UsersUserPolicy body: UserPolicy: (required)
    :param str id: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request just the payload (or the async request thread) from the
    # lower-level *_with_http_info call.
    kwargs['_return_http_data_only'] = True
    return self.post_users_by_id_policy_with_http_info(body, id, **kwargs)  # noqa: E501
def post_users_by_id_policy_with_http_info(self, body, id, **kwargs):  # noqa: E501
    """Updates a user policy  # noqa: E501

    Requires authentication as administrator  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_by_id_policy_with_http_info(body, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param UsersUserPolicy body: UserPolicy: (required)
    :param str id: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameters accepted by this endpoint, plus the generic client options.
    all_params = ['body', 'id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals; validated kwargs are merged in below,
    # so no new locals may be introduced above this line.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_by_id_policy" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_by_id_policy`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `post_users_by_id_policy`")  # noqa: E501

    collection_formats = {}

    # The user id is interpolated into the URL path as {Id}.
    path_params = {}
    if 'id' in params:
        path_params['Id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The UserPolicy object is sent as the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}/Policy', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_forgotpassword(self, body, **kwargs):  # noqa: E501
    """Initiates the forgot password process for a local user  # noqa: E501

    No authentication required  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_forgotpassword(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ForgotPassword body: ForgotPassword (required)
    :return: UsersForgotPasswordResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the deserialized result (or the async request thread) is
    # wanted, not the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    return self.post_users_forgotpassword_with_http_info(body, **kwargs)  # noqa: E501
def post_users_forgotpassword_with_http_info(self, body, **kwargs):  # noqa: E501
    """Initiates the forgot password process for a local user  # noqa: E501

    No authentication required  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_forgotpassword_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ForgotPassword body: ForgotPassword (required)
    :return: UsersForgotPasswordResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameters accepted by this endpoint, plus the generic client options.
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals; validated kwargs are merged in below,
    # so no new locals may be introduced above this line.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_forgotpassword" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_forgotpassword`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The ForgotPassword object is sent as the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    # (empty: this endpoint is reachable without credentials)
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Users/ForgotPassword', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UsersForgotPasswordResult',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_forgotpassword_pin(self, body, **kwargs):  # noqa: E501
    """Redeems a forgot password pin  # noqa: E501

    No authentication required  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_forgotpassword_pin(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ForgotPasswordPin body: ForgotPasswordPin (required)
    :return: UsersPinRedeemResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the deserialized result (or the async request thread) is
    # wanted, not the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    return self.post_users_forgotpassword_pin_with_http_info(body, **kwargs)  # noqa: E501
def post_users_forgotpassword_pin_with_http_info(self, body, **kwargs):  # noqa: E501
    """Redeems a forgot password pin  # noqa: E501

    No authentication required  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_forgotpassword_pin_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param ForgotPasswordPin body: ForgotPasswordPin (required)
    :return: UsersPinRedeemResult
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameters accepted by this endpoint, plus the generic client options.
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals; validated kwargs are merged in below,
    # so no new locals may be introduced above this line.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_forgotpassword_pin" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_forgotpassword_pin`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The ForgotPasswordPin object is sent as the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    # (empty: this endpoint is reachable without credentials)
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Users/ForgotPassword/Pin', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UsersPinRedeemResult',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_new(self, body, **kwargs):  # noqa: E501
    """Creates a user  # noqa: E501

    Requires authentication as administrator  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_new(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CreateUserByName body: CreateUserByName (required)
    :return: UserDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the deserialized UserDto (or the async request thread) is
    # wanted, not the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    return self.post_users_new_with_http_info(body, **kwargs)  # noqa: E501
def post_users_new_with_http_info(self, body, **kwargs):  # noqa: E501
    """Creates a user  # noqa: E501

    Requires authentication as administrator  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_new_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CreateUserByName body: CreateUserByName (required)
    :return: UserDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Parameters accepted by this endpoint, plus the generic client options.
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals; validated kwargs are merged in below,
    # so no new locals may be introduced above this line.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_new" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_new`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The CreateUserByName object is sent as the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'application/xml'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

    return self.api_client.call_api(
        '/Users/New', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UserDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 39.918768
| 368
| 0.608501
| 6,538
| 57,004
| 5.056439
| 0.035791
| 0.051544
| 0.019057
| 0.030491
| 0.974833
| 0.969509
| 0.964911
| 0.960404
| 0.954929
| 0.951481
| 0
| 0.016723
| 0.300295
| 57,004
| 1,427
| 369
| 39.946741
| 0.812115
| 0.331275
| 0
| 0.820413
| 1
| 0
| 0.190244
| 0.051685
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037468
| false
| 0.040052
| 0.005168
| 0
| 0.098191
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6e3f39c45de4f3eeae05492c9364a20372cfad30
| 1,566
|
py
|
Python
|
pyaz/monitor/diagnostic_settings/categories/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/monitor/diagnostic_settings/categories/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/monitor/diagnostic_settings/categories/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
from .... pyaz_utils import _call_az
def show(name, resource, resource_group=None, resource_namespace=None, resource_parent=None, resource_type=None):
    '''
    Show the diagnostic-settings category for the specified resource
    (wraps `az monitor diagnostic-settings categories show`).

    Required Parameters:
    - name -- The name of the diagnostic setting.
    - resource -- Name or ID of the target resource.

    Optional Parameters:
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - resource_namespace -- Target resource provider namespace.
    - resource_parent -- Target resource parent path, if applicable.
    - resource_type -- Target resource type. Can also accept namespace/type format (Ex: 'Microsoft.Compute/virtualMachines')
    '''
    # locals() forwards every parameter verbatim as the CLI argument dict,
    # so no additional local variables may be introduced in this body.
    return _call_az("az monitor diagnostic-settings categories show", locals())
def list(resource, resource_group=None, resource_namespace=None, resource_parent=None, resource_type=None):
    '''
    List the diagnostic settings categories for the specified resource
    (wraps `az monitor diagnostic-settings categories list`).

    NOTE: the function name shadows the builtin `list`; it is kept because
    it mirrors the az CLI sub-command and is part of the public interface.

    Required Parameters:
    - resource -- Name or ID of the target resource.

    Optional Parameters:
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - resource_namespace -- Target resource provider namespace.
    - resource_parent -- Target resource parent path, if applicable.
    - resource_type -- Target resource type. Can also accept namespace/type format (Ex: 'Microsoft.Compute/virtualMachines')
    '''
    # locals() forwards every parameter verbatim as the CLI argument dict,
    # so no additional local variables may be introduced in this body.
    return _call_az("az monitor diagnostic-settings categories list", locals())
| 44.742857
| 128
| 0.736271
| 191
| 1,566
| 5.91623
| 0.26178
| 0.099115
| 0.074336
| 0.044248
| 0.831858
| 0.831858
| 0.831858
| 0.831858
| 0.831858
| 0.831858
| 0
| 0
| 0.176884
| 1,566
| 34
| 129
| 46.058824
| 0.876649
| 0.664112
| 0
| 0
| 0
| 0
| 0.212963
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
2817aba5f60c8f38a7c2ecd6b88ed0fbc08fb417
| 4,152
|
py
|
Python
|
yolo2xml.py
|
dc-cheny/Record-Multi-Cameras-Opencv
|
dd32c2b8c293315e586724f341acf3df465dab83
|
[
"MIT"
] | null | null | null |
yolo2xml.py
|
dc-cheny/Record-Multi-Cameras-Opencv
|
dd32c2b8c293315e586724f341acf3df465dab83
|
[
"MIT"
] | null | null | null |
yolo2xml.py
|
dc-cheny/Record-Multi-Cameras-Opencv
|
dd32c2b8c293315e586724f341acf3df465dab83
|
[
"MIT"
] | null | null | null |
"""
Created by Chalice on 2022-03-20.
https://github.com/dc-cheny
"""
import os
import json
from pascal_voc_writer import Writer
from PIL import Image
def parse_cls(txt_path):
    """Parse the class-list file into a pair of lookup dicts.

    Returns (cls2idx, idx2cls) where indices are *string* digits
    ("0", "1", ...), or None when txt_path is empty.
    """
    txt_path = str(txt_path)
    if not txt_path:
        return None
    with open(txt_path, 'r') as f:
        names = [line.strip('\n').strip() for line in f.readlines()]
    cls2idx = {}
    idx2cls = {}
    for idx, name in enumerate(names):
        cls2idx[name] = str(idx)
        idx2cls[str(idx)] = name
    return cls2idx, idx2cls
def yolo2xml(yolo_ann_dir, xml_dir):
    """Convert YOLO-format txt annotations into Pascal-VOC xml files.

    Suppose that images and anns are in the same folder.

    :param yolo_ann_dir: folder containing .jpg images, one .txt per image,
        and a classes.txt listing the class names.
    :param xml_dir: output folder for the generated .xml files
        (created if missing).
    """
    if not os.path.exists(xml_dir):
        os.makedirs(xml_dir)
    all_files = os.listdir(yolo_ann_dir)
    yolo_ann_files = [x for x in all_files if x.endswith('.txt') and x != 'classes.txt']
    image_files = [x for x in all_files if x.endswith('.jpg')]
    # Map each annotation filename (stem + '.txt') to its image filename.
    ann_name2image_name = {im.split('.')[0] + '.txt': im for im in image_files}
    class_txt_path = os.path.join(yolo_ann_dir, 'classes.txt')
    _, idx2cls = parse_cls(class_txt_path)
    for yaf in yolo_ann_files:
        yaf_path = os.path.join(yolo_ann_dir, yaf)
        xml_filename = yaf.split('.')[0] + '.xml'
        image_path = os.path.join(yolo_ann_dir, ann_name2image_name[yaf])
        try:
            # The image is opened only to obtain its pixel dimensions.
            img_obj = Image.open(open(image_path, 'rb')).convert('RGB')
        except Exception as e:
            print('Load img {} error, detail={}'.format(image_path, e))
            continue
        width, height = img_obj.size
        with open(yaf_path, 'r') as f:
            yolo_ann_contents = [x.strip('\n').strip() for x in f.readlines()]
        voc_writer = Writer(xml_filename, width, height)
        for yac in yolo_ann_contents:
            yac = yac.split()
            if yac[0] in ['10', '12']:
                print(yaf)
            label, bbox = idx2cls[yac[0]], list(map(float, yac[1:]))
            x, y, w, h = bbox
            # YOLO stores the box as a relative *center* (x, y) plus
            # width/height; VOC wants absolute corner coordinates.
            # BUGFIX: min corners use -w/-h and max corners use +w/+h
            # (the original code had them swapped, producing inverted
            # boxes with xmin > xmax and ymin > ymax).
            xmin = int(width * (2 * x - w) / 2)
            xmax = int(width * (2 * x + w) / 2)
            ymin = int(height * (2 * y - h) / 2)
            ymax = int(height * (2 * y + h) / 2)
            voc_writer.addObject(label, xmin, ymin, xmax, ymax)
        voc_writer.save(os.path.join(xml_dir, xml_filename))
    print('Done!')
    return
def xml2yolo(yolo_ann_dir, xml_dir):
    """Convert YOLO-format txt annotations into Pascal-VOC xml files.

    Suppose that images and anns are in the same folder.

    NOTE(review): despite its name, this function duplicates yolo2xml
    (YOLO txt -> VOC xml); it does not convert xml back to yolo.
    Consider removing it or implementing the reverse conversion.
    """
    if not os.path.exists(xml_dir):
        os.makedirs(xml_dir)
    all_files = os.listdir(yolo_ann_dir)
    yolo_ann_files = [x for x in all_files if x.endswith('.txt') and x != 'classes.txt']
    image_files = [x for x in all_files if x.endswith('.jpg')]
    # Map each annotation filename (stem + '.txt') to its image filename.
    ann_name2image_name = {im.split('.')[0] + '.txt': im for im in image_files}
    class_txt_path = os.path.join(yolo_ann_dir, 'classes.txt')
    _, idx2cls = parse_cls(class_txt_path)
    for yaf in yolo_ann_files:
        yaf_path = os.path.join(yolo_ann_dir, yaf)
        xml_filename = yaf.split('.')[0] + '.xml'
        image_path = os.path.join(yolo_ann_dir, ann_name2image_name[yaf])
        try:
            # The image is opened only to obtain its pixel dimensions.
            img_obj = Image.open(open(image_path, 'rb')).convert('RGB')
        except Exception as e:
            print('Load img {} error, detail={}'.format(image_path, e))
            continue
        width, height = img_obj.size
        with open(yaf_path, 'r') as f:
            yolo_ann_contents = [x.strip('\n').strip() for x in f.readlines()]
        voc_writer = Writer(xml_filename, width, height)
        for yac in yolo_ann_contents:
            # BUGFIX: the line must be tokenised before indexing; the
            # original indexed into the raw string, so yac[0] was a single
            # character and the coordinate parse could never succeed.
            yac = yac.split()
            # BUGFIX/security: parse coordinates with float() instead of
            # eval() — eval on annotation-file content is unsafe.
            label, bbox = idx2cls[yac[0]], list(map(float, yac[1:]))
            x, y, w, h = bbox
            # BUGFIX: min corners use -w/-h, max corners use +w/+h
            # (the original had them swapped).
            xmin = int(width * (2 * x - w) / 2)
            xmax = int(width * (2 * x + w) / 2)
            ymin = int(height * (2 * y - h) / 2)
            ymax = int(height * (2 * y + h) / 2)
            voc_writer.addObject(label, xmin, ymin, xmax, ymax)
        voc_writer.save(os.path.join(xml_dir, xml_filename))
    print('Done!')
    return
if __name__ == '__main__':
    # Local/example run: convert one labelled sample batch to VOC xml.
    # Paths are machine-specific and expected to be edited per run.
    yolo_ann_dir = '/home/xixiang/Record-Multi-Cameras-Opencv-main/20220317/20220315_sample_with_label_batch1_new'
    xml_dir = '/home/xixiang/Record-Multi-Cameras-Opencv-main/20220317/20220315_sample_with_label_batch1_new_xml'
    yolo2xml(yolo_ann_dir, xml_dir)
| 36.421053
| 114
| 0.609104
| 644
| 4,152
| 3.714286
| 0.21118
| 0.058528
| 0.050167
| 0.035117
| 0.852007
| 0.836957
| 0.807692
| 0.807692
| 0.807692
| 0.795987
| 0
| 0.026897
| 0.247832
| 4,152
| 113
| 115
| 36.743363
| 0.739033
| 0.05395
| 0
| 0.714286
| 0
| 0
| 0.094151
| 0.048743
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| false
| 0
| 0.047619
| 0
| 0.130952
| 0.059524
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
283392b1eddbdfb7e69d484be3ef3112a2d1d573
| 37,408
|
py
|
Python
|
SBaaS_thermodynamics/stage03_quantification_tfba_execute.py
|
dmccloskey/SBaaS_thermodynamics
|
0eeed0191f952ea0226ab8bbc234a30638fb2f9f
|
[
"MIT"
] | null | null | null |
SBaaS_thermodynamics/stage03_quantification_tfba_execute.py
|
dmccloskey/SBaaS_thermodynamics
|
0eeed0191f952ea0226ab8bbc234a30638fb2f9f
|
[
"MIT"
] | null | null | null |
SBaaS_thermodynamics/stage03_quantification_tfba_execute.py
|
dmccloskey/SBaaS_thermodynamics
|
0eeed0191f952ea0226ab8bbc234a30638fb2f9f
|
[
"MIT"
] | null | null | null |
#SBaaS
from .stage03_quantification_tfba_io import stage03_quantification_tfba_io
from .stage03_quantification_measuredData_query import stage03_quantification_measuredData_query
from .stage03_quantification_dG_f_query import stage03_quantification_dG_f_query
from .stage03_quantification_dG_r_query import stage03_quantification_dG_r_query
from .stage03_quantification_otherData_query import stage03_quantification_otherData_query
from .stage03_quantification_simulation_query import stage03_quantification_simulation_query
from SBaaS_models.models_COBRA_dependencies import models_COBRA_dependencies
# Dependencies from thermodynamics
from thermodynamics.thermodynamics_dG_f_data import thermodynamics_dG_f_data
from thermodynamics.thermodynamics_dG_r_data import thermodynamics_dG_r_data
from thermodynamics.thermodynamics_metabolomicsData import thermodynamics_metabolomicsData
from thermodynamics.thermodynamics_otherData import thermodynamics_otherData
from thermodynamics.thermodynamics_simulatedData import thermodynamics_simulatedData
from thermodynamics.thermodynamics_tfba import thermodynamics_tfba
# Dependencies from resources
from sampling.sampling import cobra_sampling
from sampling.sampling_statistics import cobra_sampling,cobra_sampling_n
from cobra.manipulation.modify import convert_to_irreversible
class stage03_quantification_tfba_execute(stage03_quantification_tfba_io,
stage03_quantification_measuredData_query,
stage03_quantification_dG_f_query,
stage03_quantification_dG_r_query,
stage03_quantification_otherData_query,
stage03_quantification_simulation_query):
    def execute_thermodynamicSampling(self,simulation_id_I,models_I,
                data_dir_I,rxn_ids_I=[],
                inconsistent_dG_f_I=[],inconsistent_concentrations_I=[],
                inconsistent_tcc_I=[],
                measured_concentration_coverage_criteria_I=0.5,
                measured_dG_f_coverage_criteria_I=0.99,
                solver_I='glpk'):
        '''execute a thermodynamic analysis using the thermodynamic
        module for cobrapy

        Pipeline: constrain a copy of the model with measured fluxes, convert
        it to an irreversible model, query pH/temperature/ionic-strength,
        dG_f, metabolomics, dG0_r and consistency-check data from the
        database, add the conc_ln thermodynamic constraints, and -- if the
        constrained model still solves -- export the sampling problem for the
        sampler configured in the simulation parameters.

        Input:
        simulation_id_I = string, id of a simulation of type 'sampling'
        models_I = dict of cobra models keyed by model_id
        rxn_ids_I = list of {'rxn_id','flux_lb','flux_ub'} dicts used to bound
            reactions; when empty, measured fluxes are queried from the database
        inconsistent_dG_f_I = dG_f measured values to be substituted for estimated values
        inconsistent_concentrations_I = concentration measured values to be substituted for estimated values
        inconsistent_tcc_I = reactions considered feasible to be changed to infeasible so that dG0_r constraints do not break the model
        measured_concentration_coverage_criteria_I = float, minimum concentration coverage to consider for feasibility
        measured_dG_f_coverage_criteria_I = float, minimum dG_f coverage to consider for feasibility
        data_dir_I = directory of sampled points
        solver_I = string, solver name
            NOTE(review): solver_I is never referenced in this body; the solver
            actually exported comes from simulation_parameters['solver_id'] -- confirm.
        '''
        modelsCOBRA = models_COBRA_dependencies();
        print('execute_thermodynamicSampling...')
        # get simulation information
        simulation_info_all = [];
        simulation_info_all = self.get_rows_simulationIDAndSimulationType_dataStage03QuantificationSimulation(simulation_id_I,'sampling')
        if not simulation_info_all:
            print('simulation not found!')
            return;
        simulation_info = simulation_info_all[0]; # unique constraint guarantees only 1 row will be returned
        # get simulation parameters
        simulation_parameters_all = [];
        simulation_parameters_all = self.get_rows_simulationID_dataStage03QuantificationSimulationParameters(simulation_id_I);
        if not simulation_parameters_all:
            print('simulation not found!')
            return;
        simulation_parameters = simulation_parameters_all[0]; # unique constraint guarantees only 1 row will be returned
        # get the cobra model
        cobra_model = models_I[simulation_info['model_id']];
        # copy the model so that flux bounds and added constraints do not
        # mutate the caller's model
        cobra_model_copy = cobra_model.copy();
        # get rxn_ids
        if rxn_ids_I:
            rxn_ids = rxn_ids_I;
        else:
            rxn_ids = [];
            rxn_ids = self.get_rows_experimentIDAndModelIDAndSampleNameAbbreviation_dataStage03QuantificationMeasuredFluxes(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['sample_name_abbreviation']);
        for rxn in rxn_ids:
            # constrain the model with the measured flux lower/upper bounds
            cobra_model_copy.reactions.get_by_id(rxn['rxn_id']).lower_bound = rxn['flux_lb'];
            cobra_model_copy.reactions.get_by_id(rxn['rxn_id']).upper_bound = rxn['flux_ub'];
        # make the model irreversible (required by the thermodynamic constraints)
        convert_to_irreversible(cobra_model_copy);
        # get otherData
        pH,temperature,ionic_strength = {},{},{}
        pH,temperature,ionic_strength = self.get_rowsFormatted_experimentIDAndTimePointAndSampleNameAbbreviation_dataStage03QuantificationOtherData(simulation_info['experiment_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation']);
        # load pH, ionic_strength, and temperature parameters
        other_data = thermodynamics_otherData(pH_I=pH,temperature_I=temperature,ionic_strength_I=ionic_strength);
        other_data.check_data();
        # get dG_f data:
        dG_f = {};
        dG_f = self.get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationDGf(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation']);
        dG_f_data = thermodynamics_dG_f_data(dG_f_I=dG_f);
        dG_f_data.format_dG_f();
        # estimated dG_f values fill in metabolites without measurements
        dG_f_data.generate_estimated_dG_f(cobra_model)
        dG_f_data.check_data();
        # remove an inconsistent dGf values
        if inconsistent_dG_f_I: dG_f_data.remove_measured_dG_f(inconsistent_dG_f_I)
        # query metabolomicsData
        concentrations = [];
        concentrations = self.get_rowsDict_experimentIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationMetabolomicsData(simulation_info['experiment_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation']);
        # load metabolomicsData
        metabolomics_data = thermodynamics_metabolomicsData(measured_concentrations_I=concentrations);
        metabolomics_data.generate_estimated_metabolomics_data(cobra_model);
        # remove an inconsistent concentration values
        if inconsistent_concentrations_I: metabolomics_data.remove_measured_concentrations(inconsistent_concentrations_I);
        # get dG0r, dGr, and tcc data
        dG0_r = {};
        dG0_r = self.get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationDG0r(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation'])
        measured_concentration_coverage,measured_dG_f_coverage,feasible = {},{},{};
        measured_concentration_coverage,measured_dG_f_coverage,feasible = self.get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationTCC(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation'],0,0)
        tcc = thermodynamics_dG_r_data(dG0_r_I = dG0_r,
                        dG_r_coverage_I = measured_dG_f_coverage,
                        metabolomics_coverage_I = measured_concentration_coverage,
                        thermodynamic_consistency_check_I = feasible);
        # force user-specified reactions to infeasible so their dG0_r
        # constraints are not applied
        if inconsistent_tcc_I: tcc.change_feasibleReactions(inconsistent_tcc_I);
        # apply tfba constraints
        tfba = thermodynamics_tfba()
        tfba._add_conc_ln_constraints_transport(cobra_model_copy, metabolomics_data.measured_concentrations, metabolomics_data.estimated_concentrations,
                       tcc.dG0_r, other_data.pH,other_data.temperature,tcc.metabolomics_coverage,
                       tcc.dG_r_coverage, tcc.thermodynamic_consistency_check,
                       measured_concentration_coverage_criteria_I, measured_dG_f_coverage_criteria_I,
                       use_measured_concentrations=True,use_measured_dG0_r=True);
        # Test model: only export the sampling problem if the constrained
        # model still has a solution
        if modelsCOBRA.test_model(cobra_model_I=cobra_model_copy):
            sampling = cobra_sampling(data_dir_I = data_dir_I);
            if simulation_parameters['sampler_id']=='gpSampler':
                # export a MATLAB .mat/.m pair for an external gpSampler run
                filename_model = simulation_id_I + '.mat';
                filename_script = simulation_id_I + '.m';
                filename_points = simulation_id_I + '_points' + '.mat';
                sampling.export_sampling_matlab(cobra_model=cobra_model_copy,filename_model=filename_model,filename_script=filename_script,filename_points=filename_points,\
                    solver_id_I = simulation_parameters['solver_id'],\
                    n_points_I = simulation_parameters['n_points'],\
                    n_steps_I = simulation_parameters['n_steps'],\
                    max_time_I = simulation_parameters['max_time']);
            elif simulation_parameters['sampler_id']=='optGpSampler':
                # optGpSampler export is not implemented
                return;
            else:
                print('sampler_id not recognized');
        else:
            print('no solution found!');
def check_thermodynamicConstraints(self,simulation_id_I,models_I,rxn_ids_I=[],
inconsistent_dG_f_I=[],inconsistent_concentrations_I=[],
measured_concentration_coverage_criteria_I=0.5,
measured_dG_f_coverage_criteria_I=0.99,
n_checks_I = 5,
diagnose_solver_I='glpk',diagnose_threshold_I=0.98,diagnose_break_I=0.1):
print('check_thermodynamicConstraints...')
# get simulation information
simulation_info_all = [];
simulation_info_all = self.get_rows_simulationID_dataStage03QuantificationSimulation(simulation_id_I);
if not simulation_info_all:
print('simulation not found!')
return;
simulation_info = simulation_info_all[0]; # unique constraint guarantees only 1 row will be returned
# get simulation parameters
simulation_parameters_all = [];
simulation_parameters_all = self.get_rows_simulationID_dataStage03QuantificationSimulationParameters(simulation_id_I);
if not simulation_parameters_all:
print('simulation not found!')
return;
simulation_parameters = simulation_parameters_all[0]; # unique constraint guarantees only 1 row will be returned
# get the cobra model
cobra_model = models_I[simulation_info['model_id']];
# copy the model
cobra_model_copy = cobra_model.copy();
# get rxn_ids
if rxn_ids_I:
rxn_ids = rxn_ids_I;
else:
rxn_ids = [];
rxn_ids = self.get_rows_experimentIDAndModelIDAndSampleNameAbbreviation_dataStage03QuantificationMeasuredFluxes(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['sample_name_abbreviation']);
for rxn in rxn_ids:
# constrain the model
cobra_model_copy.reactions.get_by_id(rxn['rxn_id']).lower_bound = rxn['flux_lb'];
cobra_model_copy.reactions.get_by_id(rxn['rxn_id']).upper_bound = rxn['flux_ub'];
# make the model irreversible
convert_to_irreversible(cobra_model_copy);
# get otherData
pH,temperature,ionic_strength = {},{},{}
pH,temperature,ionic_strength = self.get_rowsFormatted_experimentIDAndTimePointAndSampleNameAbbreviation_dataStage03QuantificationOtherData(simulation_info['experiment_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation']);
# load pH, ionic_strength, and temperature parameters
other_data = thermodynamics_otherData(pH_I=pH,temperature_I=temperature,ionic_strength_I=ionic_strength);
other_data.check_data();
# get dG_f data:
dG_f = {};
dG_f = self.get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationDGf(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation']);
dG_f_data = thermodynamics_dG_f_data(dG_f_I=dG_f);
dG_f_data.format_dG_f();
dG_f_data.generate_estimated_dG_f(cobra_model)
dG_f_data.check_data();
# remove an inconsistent dGf values
if inconsistent_dG_f_I: dG_f_data.remove_measured_dG_f(inconsistent_dG_f_I)
# query metabolomicsData
concentrations = [];
concentrations = self.get_rowsDict_experimentIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationMetabolomicsData(simulation_info['experiment_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation']);
# load metabolomicsData
metabolomics_data = thermodynamics_metabolomicsData(measured_concentrations_I=concentrations);
metabolomics_data.generate_estimated_metabolomics_data(cobra_model);
# remove an inconsistent concentration values
if inconsistent_concentrations_I: metabolomics_data.remove_measured_concentrations(inconsistent_concentrations_I);
# get dG0r, dGr, and tcc data
dG0_r = {};
dG0_r = self.get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationDG0r(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation'])
measured_concentration_coverage,measured_dG_f_coverage,feasible = {},{},{};
measured_concentration_coverage,measured_dG_f_coverage,feasible = self.get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationTCC(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation'],0,0)
tcc = thermodynamics_dG_r_data(dG0_r_I = dG0_r,
dG_r_coverage_I = measured_dG_f_coverage,
metabolomics_coverage_I = measured_concentration_coverage,
thermodynamic_consistency_check_I = feasible);
# apply tfba constraints
tfba = thermodynamics_tfba()
thermodynamic_constraints_check,diagnose_variables_1,diagnose_variables_2,diagnose_variables_3 = tfba.check_conc_ln_constraints_transport(cobra_model_copy,
metabolomics_data.measured_concentrations, metabolomics_data.estimated_concentrations,
tcc.dG0_r, other_data.pH,other_data.temperature,tcc.metabolomics_coverage,
tcc.dG_r_coverage, tcc.thermodynamic_consistency_check,
measured_concentration_coverage_criteria_I, measured_dG_f_coverage_criteria_I,
n_checks_I = 5,
diagnose_solver_I=None,diagnose_threshold_I=0.98,diagnose_break_I=0.1);
return thermodynamic_constraints_check,diagnose_variables_1,diagnose_variables_2,diagnose_variables_3;
def execute_analyzeThermodynamicSamplingPoints(self,simulation_id_I,models_I,
data_dir_I,data_dir_O,rxn_ids_I=[],
inconsistent_dG_f_I=[],inconsistent_concentrations_I=[],
inconsistent_tcc_I=[],
measured_concentration_coverage_criteria_I=0.5,
measured_dG_f_coverage_criteria_I=0.99,
remove_pointsNotInSolutionSpace_I=True,
min_pointsInSolutionSpace_I=1000):
'''Load and analyze sampling points
Input:
inconsistent_dG_f_I = dG_f measured values to be substituted for estimated values
inconsistent_concentrations_I = concentration measured values to be substituted for estimated values
inconsistent_tcc_I = reactions considered feasible to be changed to infeasible so that dG0_r constraints do not break the model
measured_concentration_coverage_criteria_I = float, minimum concentration coverage to consider for feasibility
measured_dG_f_coveragea_criteria_I = float, minimum dG_f coverage to consider for feasibility
remove_pointsNotInSolutionSpace_I = boolean, remove points not in the solution space (i.e., within the lower/upper bounds)
min_pointsInSolutionSpace_I = int, minimum number of points in the solution space.
if the number of points is less that the minimum, the solution space will be increased by
(upper_bounds-lower_bounds)/4 until the minimum number of points is met
data_dir_I = directory of sampled points
data_dir_O = director to write QC'd sampled points
solver_I = string, solver name
'''
print('analyzing sampling points');
modelsCOBRA = models_COBRA_dependencies();
# get simulation information
simulation_info_all = [];
simulation_info_all = self.get_rows_simulationIDAndSimulationType_dataStage03QuantificationSimulation(simulation_id_I,'sampling')
if not simulation_info_all:
print('simulation not found!')
return;
simulation_info = simulation_info_all[0]; # unique constraint guarantees only 1 row will be returned
# get simulation parameters
simulation_parameters_all = [];
simulation_parameters_all = self.get_rows_simulationID_dataStage03QuantificationSimulationParameters(simulation_id_I);
if not simulation_parameters_all:
print('simulation not found!')
return;
simulation_parameters = simulation_parameters_all[0]; # unique constraint guarantees only 1 row will be returned
# get the cobra model
cobra_model = models_I[simulation_info['model_id']];
# copy the model
cobra_model_copy = cobra_model.copy();
# get rxn_ids
if rxn_ids_I:
rxn_ids = rxn_ids_I;
else:
rxn_ids = [];
rxn_ids = self.get_rows_experimentIDAndModelIDAndSampleNameAbbreviation_dataStage03QuantificationMeasuredFluxes(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['sample_name_abbreviation']);
for rxn in rxn_ids:
# constrain the model
cobra_model_copy.reactions.get_by_id(rxn['rxn_id']).lower_bound = rxn['flux_lb'];
cobra_model_copy.reactions.get_by_id(rxn['rxn_id']).upper_bound = rxn['flux_ub'];
# make the model irreversible
convert_to_irreversible(cobra_model_copy);
# get otherData
pH,temperature,ionic_strength = {},{},{}
pH,temperature,ionic_strength = self.get_rowsFormatted_experimentIDAndTimePointAndSampleNameAbbreviation_dataStage03QuantificationOtherData(simulation_info['experiment_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation']);
# load pH, ionic_strength, and temperature parameters
other_data = thermodynamics_otherData(pH_I=pH,temperature_I=temperature,ionic_strength_I=ionic_strength);
other_data.check_data();
# get dG_f data:
dG_f = {};
dG_f = self.get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationDGf(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation']);
dG_f_data = thermodynamics_dG_f_data(dG_f_I=dG_f);
dG_f_data.format_dG_f();
dG_f_data.generate_estimated_dG_f(cobra_model)
dG_f_data.check_data();
# remove an inconsistent dGf values
if inconsistent_dG_f_I: dG_f_data.remove_measured_dG_f(inconsistent_dG_f_I)
# query metabolomicsData
concentrations = [];
concentrations = self.get_rowsDict_experimentIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationMetabolomicsData(simulation_info['experiment_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation']);
# load metabolomicsData
metabolomics_data = thermodynamics_metabolomicsData(measured_concentrations_I=concentrations);
metabolomics_data.generate_estimated_metabolomics_data(cobra_model);
# remove an inconsistent concentration values
if inconsistent_concentrations_I: metabolomics_data.remove_measured_concentrations(inconsistent_concentrations_I);
# get dG0r, dGr, and tcc data
dG0_r = {};
dG0_r = self.get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationDG0r(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation'])
measured_concentration_coverage,measured_dG_f_coverage,feasible = {},{},{};
measured_concentration_coverage,measured_dG_f_coverage,feasible = self.get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationTCC(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation'],0,0)
tcc = thermodynamics_dG_r_data(dG0_r_I = dG0_r,
dG_r_coverage_I = measured_dG_f_coverage,
metabolomics_coverage_I = measured_concentration_coverage,
thermodynamic_consistency_check_I = feasible);
if inconsistent_tcc_I: tcc.change_feasibleReactions(inconsistent_tcc_I);
# apply tfba constraints
tfba = thermodynamics_tfba()
tfba._add_conc_ln_constraints_transport(cobra_model_copy, metabolomics_data.measured_concentrations, metabolomics_data.estimated_concentrations,
tcc.dG0_r, other_data.pH,other_data.temperature,tcc.metabolomics_coverage,
tcc.dG_r_coverage, tcc.thermodynamic_consistency_check,
measured_concentration_coverage_criteria_I, measured_dG_f_coverage_criteria_I,
use_measured_concentrations=True,use_measured_dG0_r=True);
# Test each model
if modelsCOBRA.test_model(cobra_model_I=cobra_model_copy):
sampling = cobra_sampling(data_dir_I = data_dir_I,model_I = cobra_model_copy);
if simulation_parameters['sampler_id']=='gpSampler':
# load the results of sampling
filename_points = simulation_id_I + '_points' + '.mat';
sampling.get_points_matlab(filename_points,'sampler_out');
# check if points were sampled outside the solution space
if remove_pointsNotInSolutionSpace_I:
pruned_reactions = sampling.remove_points_notInSolutionSpace(min_points_I=min_pointsInSolutionSpace_I);
## check if the model contains loops
#sampling.simulate_loops(data_fva=settings.workspace_data + '/loops_fva_tmp.json');
#sampling.find_loops(data_fva=settings.workspace_data + '/loops_fva_tmp.json');
#sampling.remove_loopsFromPoints();
sampling.descriptive_statistics();
elif simulation_parameters['sampler_id']=='optGpSampler':
return;
else:
print('sampler_id not recognized');
# add data to the database
row = {'simulation_id':simulation_id_I,
'simulation_dateAndTime':sampling.simulation_dateAndTime,
'mixed_fraction':sampling.mixed_fraction,
'data_dir':data_dir_I+'/'+filename_points,
'infeasible_loops':sampling.loops,
'used_':True,
'comment_':None
};
self.add_dataStage03QuantificationSampledPoints([row])
#row = None;
#row = data_stage03_quantification_sampledPoints(
# simulation_id_I,
# sampling.simulation_dateAndTime,
# sampling.mixed_fraction,
# data_dir_I+'/'+filename_points,
# sampling.loops,
# True,
# None);
#self.session.add(row);
# write points to json file
# add data to the database
sampledData_O = [];
for k,v in sampling.points_statistics.items():
type,units = tfba.get_variableTypeAndUnits(k);
row = {'simulation_id':simulation_id_I,
'simulation_dateAndTime':sampling.simulation_dateAndTime,
'variable_id':k,
'variable_type':type,
'variable_units':units,
'sampling_points':None, #v['points'],
'sampling_ave':v['ave'],
'sampling_var':v['var'],
'sampling_lb':v['lb'],
'sampling_ub':v['ub'],
'sampling_ci':0.95,
'sampling_min':v['min'],
'sampling_max':v['max'],
'sampling_median':v['median'],
'sampling_iq_1':v['iq_1'],
'sampling_iq_3':v['iq_3'],
'used_':True,
'comment_':None};
sampledData_O.append(row);
#row = None;
#row = data_stage03_quantification_sampledData(
# simulation_id_I,
# sampling.simulation_dateAndTime,
# k,
# type,
# units,
# None, #v['points'],
# v['ave'],
# v['var'],
# v['lb'],
# v['ub'],
# v['min'],
# 0.95,
# v['max'],
# v['median'],
# v['iq_1'],
# v['iq_3'],
# True,
# None);
#self.session.add(row);
self.add_dataStage03QuantificationSampledData(sampledData_O);
else:
print('no solution found!');
    def execute_tfba(self,simulation_id_I,models_I,
                data_dir_I,rxn_ids_I=[],
                inconsistent_dG_f_I=[],inconsistent_concentrations_I=[],
                inconsistent_tcc_I=[],
                measured_concentration_coverage_criteria_I=0.5,
                measured_dG_f_coverage_criteria_I=0.99,
                solver_I='glpk'):
        '''execute a thermodynamic flux balance analysis using the thermodynamic
        module for cobrapy

        NOTE(review): unimplemented stub -- the body consists of this docstring
        only, so calling this method does nothing and returns None.

        Input:
        inconsistent_dG_f_I = dG_f measured values to be substituted for estimated values
        inconsistent_concentrations_I = concentration measured values to be substituted for estimated values
        inconsistent_tcc_I = reactions considered feasible to be changed to infeasible so that dG0_r constraints do not break the model
        measured_concentration_coverage_criteria_I = float, minimum concentration coverage to consider for feasibility
        measured_dG_f_coverage_criteria_I = float, minimum dG_f coverage to consider for feasibility
        solver_I = string, solver name
        '''
def execute_tfva(self,simulation_id_I,models_I,
data_dir_I,rxn_ids_I=[],
inconsistent_dG_f_I=[],inconsistent_concentrations_I=[],
inconsistent_tcc_I=[],
measured_concentration_coverage_criteria_I=0.5,
measured_dG_f_coverage_criteria_I=0.99,
solver_I='glpk'):
'''execute a thermodynamic flux variability analysis on the reaction variables
using the thermodynamic module for cobrapy
Input:
inconsistent_dG_f_I = dG_f measured values to be substituted for estimated values
inconsistent_concentrations_I = concentration measured values to be substituted for estimated values
inconsistent_tcc_I = reactions considered feasible to be changed to infeasible so that dG0_r constraints do not break the model
measured_concentration_coverage_criteria_I = float, minimum concentration coverage to consider for feasibility
measured_dG_f_coveragea_criteria_I = float, minimum dG_f coverage to consider for feasibility
solver_I = string, solver name
'''
data_O = []
modelsCOBRA = models_COBRA_dependencies();
# get simulation information
simulation_info_all = [];
simulation_info_all = self.get_rows_simulationIDAndSimulationType_dataStage03QuantificationSimulation(simulation_id_I,'sampling')
if not simulation_info_all:
print('simulation not found!')
return;
simulation_info = simulation_info_all[0]; # unique constraint guarantees only 1 row will be returned
# get simulation parameters
simulation_parameters_all = [];
simulation_parameters_all = self.get_rows_simulationID_dataStage03QuantificationSimulationParameters(simulation_id_I);
if not simulation_parameters_all:
print('simulation not found!')
return;
simulation_parameters = simulation_parameters_all[0]; # unique constraint guarantees only 1 row will be returned
# get the cobra model
cobra_model = models_I[simulation_info['model_id']];
# copy the model
cobra_model_copy = cobra_model.copy();
# get rxn_ids
if rxn_ids_I:
rxn_ids = rxn_ids_I;
else:
rxn_ids = [];
rxn_ids = self.get_rows_experimentIDAndModelIDAndSampleNameAbbreviation_dataStage03QuantificationMeasuredFluxes(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['sample_name_abbreviation']);
for rxn in rxn_ids:
# constrain the model
cobra_model_copy.reactions.get_by_id(rxn['rxn_id']).lower_bound = rxn['flux_lb'];
cobra_model_copy.reactions.get_by_id(rxn['rxn_id']).upper_bound = rxn['flux_ub'];
# make the model irreversible
convert_to_irreversible(cobra_model_copy); #TODO ensure that the model is irreversible before!
# get otherData
pH,temperature,ionic_strength = {},{},{}
pH,temperature,ionic_strength = self.get_rowsFormatted_experimentIDAndTimePointAndSampleNameAbbreviation_dataStage03QuantificationOtherData(simulation_info['experiment_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation']);
# load pH, ionic_strength, and temperature parameters
other_data = thermodynamics_otherData(pH_I=pH,temperature_I=temperature,ionic_strength_I=ionic_strength);
other_data.check_data();
# get dG_f data:
dG_f = {};
dG_f = self.get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationDGf(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation']);
dG_f_data = thermodynamics_dG_f_data(dG_f_I=dG_f);
dG_f_data.format_dG_f();
dG_f_data.generate_estimated_dG_f(cobra_model)
dG_f_data.check_data();
# remove an inconsistent dGf values
if inconsistent_dG_f_I: dG_f_data.remove_measured_dG_f(inconsistent_dG_f_I)
# query metabolomicsData
concentrations = [];
concentrations = self.get_rowsDict_experimentIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationMetabolomicsData(simulation_info['experiment_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation']);
# load metabolomicsData
metabolomics_data = thermodynamics_metabolomicsData(measured_concentrations_I=concentrations);
metabolomics_data.generate_estimated_metabolomics_data(cobra_model);
# remove an inconsistent concentration values
if inconsistent_concentrations_I: metabolomics_data.remove_measured_concentrations(inconsistent_concentrations_I);
# get dG0r, dGr, and tcc data
dG0_r = {};
dG0_r = self.get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationDG0r(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation'])
dG_r = {};
dG_r = self.get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationDG0r(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation'])
measured_concentration_coverage,measured_dG_f_coverage,feasible = {},{},{};
measured_concentration_coverage,measured_dG_f_coverage,feasible = self.get_rowsDict_experimentIDAndModelIDAndTimePointAndSampleNameAbbreviations_dataStage03QuantificationTCC(simulation_info['experiment_id'],simulation_info['model_id'],simulation_info['time_point'],simulation_info['sample_name_abbreviation'],0,0)
tcc = thermodynamics_dG_r_data(dG0_r_I = dG0_r,
dG_r_I = dG_r,
dG_r_coverage_I = measured_dG_f_coverage,
metabolomics_coverage_I = measured_concentration_coverage,
thermodynamic_consistency_check_I = feasible);
if inconsistent_tcc_I: tcc.change_feasibleReactions(inconsistent_tcc_I);
import optlang
optlang.available_solvers['GUROBI']=True
solver_I = 'gurobi'
tfba = thermodynamics_tfba()
cobra_model_copy1 = cobra_model_copy.copy()
tfba.tfva(cobra_model_copy1,
tcc.dG_r,#tcc.dG0_r,
tcc.dG_r_coverage, tcc.thermodynamic_consistency_check,
use_measured_dG0_r=True, reaction_list=None,fraction_of_optimum=1.0, solver=solver_I,
objective_sense="maximize")
tfba.analyze_tfva_results(threshold=1e-6)
for k,v in tfba.tfva_data.items():
analysis_list = []
for k1,v1 in tfba.tfva_analysis[k].items():
if v1:
analysis_list.append(k1)
analysis_str = ';'.join(analysis_list)
row = {'simulation_id':simulation_id_I,
'simulation_dateAndTime':None,
'variable_id':k,
'variable_type':'flux',
'variable_units':v['flux_units'],
'fva_minimum':v['flux_lb'],
'fva_maximum':v['flux_ub'],
'fva_method':'tfva',
'allow_loops':True,
'fva_options':None,
'solver_id':solver_I,
'used_':True,
'comment_':analysis_str}
data_O.append(row)
cobra_model_copy1 = cobra_model_copy.copy()
tfba.tfva_dG_r(cobra_model_copy1,
tcc.dG_r,#tcc.dG0_r,
tcc.dG_r_coverage, tcc.thermodynamic_consistency_check,
use_measured_dG0_r=True, fraction_of_optimum=1.0, solver=solver_I,
objective_sense="maximize")
for k,v in tfba.tfva_dG_r_data.items():
row = {'simulation_id':simulation_id_I,
'simulation_dateAndTime':None,
'variable_id':k,
'variable_type':'dG_r',
'variable_units':v['flux_units'],
'fva_minimum':v['flux_lb'],
'fva_maximum':v['flux_ub'],
'fva_method':'tfva',
'allow_loops':True,
'fva_options':None,
'solver_id':solver_I,
'used_':True,
'comment_':None}
data_O.append(row)
cobra_model_copy1 = cobra_model_copy.copy()
tfba.tfva_concentrations(cobra_model_copy1,
metabolomics_data.measured_concentrations, metabolomics_data.estimated_concentrations,
tcc.dG0_r,other_data.temperature,tcc.metabolomics_coverage,
tcc.dG_r_coverage, tcc.thermodynamic_consistency_check,
measured_concentration_coverage_criteria = 0.5, measured_dG_f_coverage_criteria = 0.99,
use_measured_concentrations=True,use_measured_dG0_r=True,fraction_of_optimum=1.0, solver=solver_I,
objective_sense="maximize")
for k,v in tfba.tfva_concentrations_data.items():
row = {'simulation_id':simulation_id_I,
'simulation_dateAndTime':None,
'variable_id':k,
'variable_type':'conc_ln',
'variable_units':v['flux_units'],
'fva_minimum':v['flux_lb'],
'fva_maximum':v['flux_ub'],
'fva_method':'tfva',
'allow_loops':True,
'fva_options':None,
'solver_id':solver_I,
'used_':True,
'comment_':None}
data_O.append(row)
if data_O:
self.add_rows_table('data_stage03_quantification_simulatedData_tfva',data_O);
| 63.189189
| 321
| 0.687874
| 3,988
| 37,408
| 6.027583
| 0.072718
| 0.06523
| 0.027956
| 0.027041
| 0.848115
| 0.831725
| 0.816707
| 0.805225
| 0.801564
| 0.797321
| 0
| 0.009165
| 0.238746
| 37,408
| 592
| 322
| 63.189189
| 0.834955
| 0.170391
| 0
| 0.747017
| 0
| 0
| 0.095364
| 0.026878
| 0
| 0
| 0
| 0.001689
| 0
| 1
| 0.011933
| false
| 0
| 0.040573
| 0
| 0.057279
| 0.0358
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
954e82dfed2cf57db012a317c71c370ae48a85f1
| 71
|
py
|
Python
|
hermit/shards/__init__.py
|
rsbondi/hermit
|
0007d8077547484efa173295090775b3cd5ce75b
|
[
"Apache-2.0"
] | 1
|
2021-07-23T16:43:06.000Z
|
2021-07-23T16:43:06.000Z
|
hermit/shards/__init__.py
|
rsbondi/hermit
|
0007d8077547484efa173295090775b3cd5ce75b
|
[
"Apache-2.0"
] | null | null | null |
hermit/shards/__init__.py
|
rsbondi/hermit
|
0007d8077547484efa173295090775b3cd5ce75b
|
[
"Apache-2.0"
] | 1
|
2019-08-16T09:36:32.000Z
|
2019-08-16T09:36:32.000Z
|
from .interface import *
from .shard import *
from .shard_set import *
| 17.75
| 24
| 0.746479
| 10
| 71
| 5.2
| 0.5
| 0.384615
| 0.576923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169014
| 71
| 3
| 25
| 23.666667
| 0.881356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
956d8f10b93f693b4ed49784cc9fe296aac62b99
| 18,771
|
py
|
Python
|
sdk/python/pulumi_github/repository_collaborator.py
|
pulumi/pulumi-github
|
303ed7a28cbfe6ba1db75b3b365dcfa0b00e6e91
|
[
"ECL-2.0",
"Apache-2.0"
] | 20
|
2020-04-27T15:05:01.000Z
|
2022-02-08T00:28:32.000Z
|
sdk/python/pulumi_github/repository_collaborator.py
|
pulumi/pulumi-github
|
303ed7a28cbfe6ba1db75b3b365dcfa0b00e6e91
|
[
"ECL-2.0",
"Apache-2.0"
] | 103
|
2020-05-01T17:36:32.000Z
|
2022-03-31T15:26:35.000Z
|
sdk/python/pulumi_github/repository_collaborator.py
|
pulumi/pulumi-github
|
303ed7a28cbfe6ba1db75b3b365dcfa0b00e6e91
|
[
"ECL-2.0",
"Apache-2.0"
] | 4
|
2020-06-24T19:15:02.000Z
|
2021-11-26T08:05:46.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['RepositoryCollaboratorArgs', 'RepositoryCollaborator']
@pulumi.input_type
class RepositoryCollaboratorArgs:
    def __init__(__self__, *,
                 repository: pulumi.Input[str],
                 username: pulumi.Input[str],
                 permission: Optional[pulumi.Input[str]] = None,
                 permission_diff_suppression: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a RepositoryCollaborator resource.
        :param pulumi.Input[str] repository: The GitHub repository
        :param pulumi.Input[str] username: The user to add to the repository as a collaborator.
        :param pulumi.Input[str] permission: The permission of the outside collaborator for the repository.
               Must be one of `pull`, `push`, `maintain`, `triage` or `admin` for organization-owned repositories.
               Must be `push` for personal repositories. Defaults to `push`.
        :param pulumi.Input[bool] permission_diff_suppression: Suppress plan diffs for `triage` and `maintain`. Defaults to `false`.
        """
        # Required arguments are always recorded; optional ones are stored only
        # when supplied, so unset values fall back to the provider defaults.
        pulumi.set(__self__, "repository", repository)
        pulumi.set(__self__, "username", username)
        for arg_name, arg_value in (("permission", permission),
                                    ("permission_diff_suppression", permission_diff_suppression)):
            if arg_value is not None:
                pulumi.set(__self__, arg_name, arg_value)

    @property
    @pulumi.getter
    def repository(self) -> pulumi.Input[str]:
        """
        The GitHub repository
        """
        return pulumi.get(self, "repository")

    @repository.setter
    def repository(self, value: pulumi.Input[str]):
        pulumi.set(self, "repository", value)

    @property
    @pulumi.getter
    def username(self) -> pulumi.Input[str]:
        """
        The user to add to the repository as a collaborator.
        """
        return pulumi.get(self, "username")

    @username.setter
    def username(self, value: pulumi.Input[str]):
        pulumi.set(self, "username", value)

    @property
    @pulumi.getter
    def permission(self) -> Optional[pulumi.Input[str]]:
        """
        The permission of the outside collaborator for the repository.
        Must be one of `pull`, `push`, `maintain`, `triage` or `admin` for organization-owned repositories.
        Must be `push` for personal repositories. Defaults to `push`.
        """
        return pulumi.get(self, "permission")

    @permission.setter
    def permission(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "permission", value)

    @property
    @pulumi.getter(name="permissionDiffSuppression")
    def permission_diff_suppression(self) -> Optional[pulumi.Input[bool]]:
        """
        Suppress plan diffs for `triage` and `maintain`. Defaults to `false`.
        """
        return pulumi.get(self, "permission_diff_suppression")

    @permission_diff_suppression.setter
    def permission_diff_suppression(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "permission_diff_suppression", value)
@pulumi.input_type
class _RepositoryCollaboratorState:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen). Unlike the Args
    # class, every field here is optional: state objects are used for lookup
    # and filtering (see RepositoryCollaborator.get), where any subset of
    # properties may be supplied.
    def __init__(__self__, *,
                 invitation_id: Optional[pulumi.Input[str]] = None,
                 permission: Optional[pulumi.Input[str]] = None,
                 permission_diff_suppression: Optional[pulumi.Input[bool]] = None,
                 repository: Optional[pulumi.Input[str]] = None,
                 username: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering RepositoryCollaborator resources.
        :param pulumi.Input[str] invitation_id: ID of the invitation to be used in `UserInvitationAccepter`
        :param pulumi.Input[str] permission: The permission of the outside collaborator for the repository.
               Must be one of `pull`, `push`, `maintain`, `triage` or `admin` for organization-owned repositories.
               Must be `push` for personal repositories. Defaults to `push`.
        :param pulumi.Input[bool] permission_diff_suppression: Suppress plan diffs for `triage` and `maintain`. Defaults to `false`.
        :param pulumi.Input[str] repository: The GitHub repository
        :param pulumi.Input[str] username: The user to add to the repository as a collaborator.
        """
        # Only explicitly supplied values are stored, so omitted fields do not
        # participate in the lookup/filter.
        if invitation_id is not None:
            pulumi.set(__self__, "invitation_id", invitation_id)
        if permission is not None:
            pulumi.set(__self__, "permission", permission)
        if permission_diff_suppression is not None:
            pulumi.set(__self__, "permission_diff_suppression", permission_diff_suppression)
        if repository is not None:
            pulumi.set(__self__, "repository", repository)
        if username is not None:
            pulumi.set(__self__, "username", username)

    @property
    # Wire name differs from the Python attribute name.
    @pulumi.getter(name="invitationId")
    def invitation_id(self) -> Optional[pulumi.Input[str]]:
        """
        ID of the invitation to be used in `UserInvitationAccepter`
        """
        return pulumi.get(self, "invitation_id")

    @invitation_id.setter
    def invitation_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "invitation_id", value)

    @property
    @pulumi.getter
    def permission(self) -> Optional[pulumi.Input[str]]:
        """
        The permission of the outside collaborator for the repository.
        Must be one of `pull`, `push`, `maintain`, `triage` or `admin` for organization-owned repositories.
        Must be `push` for personal repositories. Defaults to `push`.
        """
        return pulumi.get(self, "permission")

    @permission.setter
    def permission(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "permission", value)

    @property
    # Wire name differs from the Python attribute name.
    @pulumi.getter(name="permissionDiffSuppression")
    def permission_diff_suppression(self) -> Optional[pulumi.Input[bool]]:
        """
        Suppress plan diffs for `triage` and `maintain`. Defaults to `false`.
        """
        return pulumi.get(self, "permission_diff_suppression")

    @permission_diff_suppression.setter
    def permission_diff_suppression(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "permission_diff_suppression", value)

    @property
    @pulumi.getter
    def repository(self) -> Optional[pulumi.Input[str]]:
        """
        The GitHub repository
        """
        return pulumi.get(self, "repository")

    @repository.setter
    def repository(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "repository", value)

    @property
    @pulumi.getter
    def username(self) -> Optional[pulumi.Input[str]]:
        """
        The user to add to the repository as a collaborator.
        """
        return pulumi.get(self, "username")

    @username.setter
    def username(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "username", value)
class RepositoryCollaborator(pulumi.CustomResource):
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen). The two
    # @overload stubs document the two supported calling conventions
    # (keyword properties vs. an Args object); the real __init__ dispatches
    # to _internal_init via _utilities.get_resource_args_opts.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 permission: Optional[pulumi.Input[str]] = None,
                 permission_diff_suppression: Optional[pulumi.Input[bool]] = None,
                 repository: Optional[pulumi.Input[str]] = None,
                 username: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a GitHub repository collaborator resource.
        This resource allows you to add/remove collaborators from repositories in your
        organization or personal account. For organization repositories, collaborators can
        have explicit (and differing levels of) read, write, or administrator access to
        specific repositories, without giving the user full organization membership.
        For personal repositories, collaborators can only be granted write
        (implictly includes read) permission.
        When applied, an invitation will be sent to the user to become a collaborator
        on a repository. When destroyed, either the invitation will be cancelled or the
        collaborator will be removed from the repository.
        Further documentation on GitHub collaborators:
        - [Adding outside collaborators to your personal repositories](https://help.github.com/en/github/setting-up-and-managing-your-github-user-account/managing-access-to-your-personal-repositories)
        - [Adding outside collaborators to repositories in your organization](https://help.github.com/articles/adding-outside-collaborators-to-repositories-in-your-organization/)
        - [Converting an organization member to an outside collaborator](https://help.github.com/articles/converting-an-organization-member-to-an-outside-collaborator/)
        ## Example Usage
        ```python
        import pulumi
        import pulumi_github as github
        # Add a collaborator to a repository
        a_repo_collaborator = github.RepositoryCollaborator("aRepoCollaborator",
            permission="admin",
            repository="our-cool-repo",
            username="SomeUser")
        ```
        ## Import
        GitHub Repository Collaborators can be imported using an ID made up of `repository:username`, e.g.
        ```sh
        $ pulumi import github:index/repositoryCollaborator:RepositoryCollaborator collaborator terraform:someuser
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] permission: The permission of the outside collaborator for the repository.
               Must be one of `pull`, `push`, `maintain`, `triage` or `admin` for organization-owned repositories.
               Must be `push` for personal repositories. Defaults to `push`.
        :param pulumi.Input[bool] permission_diff_suppression: Suppress plan diffs for `triage` and `maintain`. Defaults to `false`.
        :param pulumi.Input[str] repository: The GitHub repository
        :param pulumi.Input[str] username: The user to add to the repository as a collaborator.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: RepositoryCollaboratorArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a GitHub repository collaborator resource.
        This resource allows you to add/remove collaborators from repositories in your
        organization or personal account. For organization repositories, collaborators can
        have explicit (and differing levels of) read, write, or administrator access to
        specific repositories, without giving the user full organization membership.
        For personal repositories, collaborators can only be granted write
        (implictly includes read) permission.
        When applied, an invitation will be sent to the user to become a collaborator
        on a repository. When destroyed, either the invitation will be cancelled or the
        collaborator will be removed from the repository.
        Further documentation on GitHub collaborators:
        - [Adding outside collaborators to your personal repositories](https://help.github.com/en/github/setting-up-and-managing-your-github-user-account/managing-access-to-your-personal-repositories)
        - [Adding outside collaborators to repositories in your organization](https://help.github.com/articles/adding-outside-collaborators-to-repositories-in-your-organization/)
        - [Converting an organization member to an outside collaborator](https://help.github.com/articles/converting-an-organization-member-to-an-outside-collaborator/)
        ## Example Usage
        ```python
        import pulumi
        import pulumi_github as github
        # Add a collaborator to a repository
        a_repo_collaborator = github.RepositoryCollaborator("aRepoCollaborator",
            permission="admin",
            repository="our-cool-repo",
            username="SomeUser")
        ```
        ## Import
        GitHub Repository Collaborators can be imported using an ID made up of `repository:username`, e.g.
        ```sh
        $ pulumi import github:index/repositoryCollaborator:RepositoryCollaborator collaborator terraform:someuser
        ```
        :param str resource_name: The name of the resource.
        :param RepositoryCollaboratorArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Resolve which overload the caller used: if an Args object was
        # supplied, unpack it into keyword properties for _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(RepositoryCollaboratorArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 permission: Optional[pulumi.Input[str]] = None,
                 permission_diff_suppression: Optional[pulumi.Input[bool]] = None,
                 repository: Optional[pulumi.Input[str]] = None,
                 username: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Normalize and validate resource options before registration.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the property bag from the
            # keyword arguments. __props__ is reserved for the get() path.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = RepositoryCollaboratorArgs.__new__(RepositoryCollaboratorArgs)
            __props__.__dict__["permission"] = permission
            __props__.__dict__["permission_diff_suppression"] = permission_diff_suppression
            # Required properties may only be omitted when adopting an
            # existing resource by URN.
            if repository is None and not opts.urn:
                raise TypeError("Missing required property 'repository'")
            __props__.__dict__["repository"] = repository
            if username is None and not opts.urn:
                raise TypeError("Missing required property 'username'")
            __props__.__dict__["username"] = username
            # Output-only property; populated by the provider after creation.
            __props__.__dict__["invitation_id"] = None
        super(RepositoryCollaborator, __self__).__init__(
            'github:index/repositoryCollaborator:RepositoryCollaborator',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            invitation_id: Optional[pulumi.Input[str]] = None,
            permission: Optional[pulumi.Input[str]] = None,
            permission_diff_suppression: Optional[pulumi.Input[bool]] = None,
            repository: Optional[pulumi.Input[str]] = None,
            username: Optional[pulumi.Input[str]] = None) -> 'RepositoryCollaborator':
        """
        Get an existing RepositoryCollaborator resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] invitation_id: ID of the invitation to be used in `UserInvitationAccepter`
        :param pulumi.Input[str] permission: The permission of the outside collaborator for the repository.
               Must be one of `pull`, `push`, `maintain`, `triage` or `admin` for organization-owned repositories.
               Must be `push` for personal repositories. Defaults to `push`.
        :param pulumi.Input[bool] permission_diff_suppression: Suppress plan diffs for `triage` and `maintain`. Defaults to `false`.
        :param pulumi.Input[str] repository: The GitHub repository
        :param pulumi.Input[str] username: The user to add to the repository as a collaborator.
        """
        # Merging in the id makes __init__ take the "lookup existing" path
        # (opts.id is not None) inside _internal_init.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _RepositoryCollaboratorState.__new__(_RepositoryCollaboratorState)
        __props__.__dict__["invitation_id"] = invitation_id
        __props__.__dict__["permission"] = permission
        __props__.__dict__["permission_diff_suppression"] = permission_diff_suppression
        __props__.__dict__["repository"] = repository
        __props__.__dict__["username"] = username
        return RepositoryCollaborator(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="invitationId")
    def invitation_id(self) -> pulumi.Output[str]:
        """
        ID of the invitation to be used in `UserInvitationAccepter`
        """
        return pulumi.get(self, "invitation_id")

    @property
    @pulumi.getter
    def permission(self) -> pulumi.Output[Optional[str]]:
        """
        The permission of the outside collaborator for the repository.
        Must be one of `pull`, `push`, `maintain`, `triage` or `admin` for organization-owned repositories.
        Must be `push` for personal repositories. Defaults to `push`.
        """
        return pulumi.get(self, "permission")

    @property
    @pulumi.getter(name="permissionDiffSuppression")
    def permission_diff_suppression(self) -> pulumi.Output[Optional[bool]]:
        """
        Suppress plan diffs for `triage` and `maintain`. Defaults to `false`.
        """
        return pulumi.get(self, "permission_diff_suppression")

    @property
    @pulumi.getter
    def repository(self) -> pulumi.Output[str]:
        """
        The GitHub repository
        """
        return pulumi.get(self, "repository")

    @property
    @pulumi.getter
    def username(self) -> pulumi.Output[str]:
        """
        The user to add to the repository as a collaborator.
        """
        return pulumi.get(self, "username")
| 45.894866
| 200
| 0.667785
| 2,060
| 18,771
| 5.91699
| 0.103884
| 0.055952
| 0.053983
| 0.045123
| 0.829518
| 0.799655
| 0.783739
| 0.773895
| 0.765772
| 0.743293
| 0
| 0.00007
| 0.240158
| 18,771
| 408
| 201
| 46.007353
| 0.854519
| 0.435033
| 0
| 0.630208
| 1
| 0
| 0.109821
| 0.047507
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15625
| false
| 0.005208
| 0.026042
| 0
| 0.276042
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95bd7f3c803d3b8af928932562acb478e75327b5
| 53,084
|
py
|
Python
|
news/tests.py
|
manavshrivastavagit/django-restapi
|
52261c93b58422b0e39cae656ae9409ea03a488d
|
[
"MIT"
] | 5
|
2017-01-13T08:17:25.000Z
|
2020-04-07T12:50:51.000Z
|
news/tests.py
|
manavshrivastavagit/django-restapi
|
52261c93b58422b0e39cae656ae9409ea03a488d
|
[
"MIT"
] | 2
|
2016-10-25T12:51:30.000Z
|
2017-10-14T18:38:59.000Z
|
news/tests.py
|
manavshrivastavagit/django-restapi
|
52261c93b58422b0e39cae656ae9409ea03a488d
|
[
"MIT"
] | 4
|
2017-09-05T19:03:13.000Z
|
2021-03-23T07:47:41.000Z
|
from django.contrib.auth.models import User
from rest_framework.test import APITestCase, APIClient
from rest_framework.reverse import reverse
from rest_framework import status
from students.models import Class, Subject, Student, Teacher
from .models import News, Comment
from .serializers import NewsSerializer, CommentSerializer
class NewsStudentsViewSetTestCase(APITestCase):
    """Tests for the students' news endpoints (list/detail/CRUD).

    Covers authentication requirements, role-based access (students only),
    class-based visibility, serializer validation bounds for ``title``
    (3-100 chars) and ``content`` (5-10000 chars), and author-only
    update/delete permissions.
    """

    def setUp(self):
        self.client = APIClient()
        self.list_view_name = 'news:students_news-list'
        self.detail_view_name = 'news:students_news-detail'
        self.user = User(username='test', email='test@test.com')
        self.user.set_password('password123')
        self.user.save()
        self.clazz = Class.objects.create(number=10, letter='A')
        self.student = Student.objects.create(user=self.user, clazz=self.clazz)
        # News is targeted at the student's own class so list/detail views
        # include it by default.
        self.news = News.objects.create(
            title='test news',
            content='test news content',
            class_number=self.clazz.number,
            class_letter=self.clazz.letter,
            author=self.user,
        )
        self.comment = Comment.objects.create(
            news=self.news,
            author=self.user,
            content='test comment content'
        )

    def test_news_list_with_anonymous_user(self):
        response = self.client.get(reverse(self.list_view_name))
        self.assertEqual(response.data['detail'], 'Authentication credentials were not provided.')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

    def test_news_detail_with_anonymous_user(self):
        response = self.client.get(reverse(self.detail_view_name, kwargs={'pk': self.news.id}))
        self.assertEqual(response.data['detail'], 'Authentication credentials were not provided.')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

    def test_news_with_authenticated_user(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.get(reverse(self.list_view_name))
        self.assertIsNotNone(response.data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_news_detail_with_authenticated_user(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.get(reverse(self.detail_view_name, kwargs={'pk': self.news.id}))
        self.assertIsNotNone(response.data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_news_list_with_teacher_account(self):
        teacher_user = User.objects.create(username='teacher', password='123456')
        subject = Subject.objects.create(title='Maths')
        # The Teacher row must exist (side effect) so the view recognizes the
        # account's role; the object itself is not referenced afterwards.
        Teacher.objects.create(user=teacher_user, subject=subject)
        self.client.force_authenticate(user=teacher_user)
        response = self.client.get(reverse(self.list_view_name))
        self.assertEqual(
            response.data['detail'], 'Only students are allowed to view and modify this content.'
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_news_list_with_same_class(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.get(reverse(self.list_view_name))
        results = response.data['results']
        self.assertEqual(results[0]['title'], self.news.title)
        self.assertEqual(results[0]['content'], self.news.content)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_news_list_with_different_class(self):
        self.client.force_authenticate(user=self.user)
        self.student.clazz = Class.objects.create(number=11, letter='V')
        # BUG FIX: the class change must be persisted, otherwise the view may
        # still see the student in the original class when it re-reads the DB.
        self.student.save()
        response = self.client.get(reverse(self.list_view_name))
        self.assertEqual(response.data['results'], [])
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_news_creation_with_empty_title(self):
        self.client.force_authenticate(user=self.user)
        self.news.title = ''
        post_data = NewsSerializer(self.news).data
        response = self.client.post(reverse(self.list_view_name), post_data, format='json')
        self.assertEqual(response.data['title'], ['This field may not be blank.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_creation_with_too_short_title(self):
        self.client.force_authenticate(user=self.user)
        self.news.title = 'ab'
        post_data = NewsSerializer(self.news).data
        response = self.client.post(reverse(self.list_view_name), post_data, format='json')
        self.assertEqual(response.data['title'], ['Ensure this field has at least 3 characters.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_creation_with_too_long_title(self):
        self.client.force_authenticate(user=self.user)
        self.news.title = 'new title' * 120
        post_data = NewsSerializer(self.news).data
        response = self.client.post(reverse(self.list_view_name), post_data, format='json')
        self.assertEqual(
            response.data['title'], ['Ensure this field has no more than 100 characters.']
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_creation_with_empty_content(self):
        self.client.force_authenticate(user=self.user)
        self.news.content = ''
        post_data = NewsSerializer(self.news).data
        response = self.client.post(reverse(self.list_view_name), post_data, format='json')
        self.assertEqual(response.data['content'], ['This field may not be blank.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_creation_with_too_short_content(self):
        self.client.force_authenticate(user=self.user)
        self.news.content = 'abc'
        post_data = NewsSerializer(self.news).data
        response = self.client.post(reverse(self.list_view_name), post_data, format='json')
        self.assertEqual(response.data['content'], ['Ensure this field has at least 5 characters.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_creation_with_too_long_content(self):
        self.client.force_authenticate(user=self.user)
        self.news.content = 'content' * 10000
        post_data = NewsSerializer(self.news).data
        response = self.client.post(reverse(self.list_view_name), post_data, format='json')
        self.assertEqual(
            response.data['content'], ['Ensure this field has no more than 10000 characters.']
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_creation_with_valid_data(self):
        self.client.force_authenticate(user=self.user)
        self.news.title = 'new title'
        self.news.content = 'content'
        post_data = NewsSerializer(self.news).data
        response = self.client.post(reverse(self.list_view_name), post_data, format='json')
        self.assertEqual(response.data['title'], self.news.title)
        self.assertEqual(response.data['content'], self.news.content)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_news_detail_with_invalid_id(self):
        self.client.force_authenticate(user=self.user)
        # self.news.id + 1 is guaranteed not to exist in the test DB.
        response = self.client.get(reverse(self.detail_view_name, kwargs={'pk': self.news.id + 1}))
        self.assertEqual(response.data['detail'], 'Not found.')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_news_detail_with_valid_id(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.get(reverse(self.detail_view_name, kwargs={'pk': self.news.id}))
        self.assertEqual(response.data['id'], self.news.id)
        self.assertEqual(response.data['title'], self.news.title)
        self.assertEqual(response.data['content'], self.news.content)
        comments_data = response.data['comments']
        self.assertEqual(comments_data[0]['content'], self.comment.content)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_news_update_of_another_user(self):
        self.client.force_authenticate(user=self.user)
        # Reassign the news item to a different student; the authenticated
        # user is then no longer its author.
        new_user = User.objects.create(username='test2', password='pass')
        Student.objects.create(user=new_user, clazz=self.clazz)
        self.news.author = new_user
        self.news.save()
        response = self.client.put(
            reverse(self.detail_view_name, kwargs={'pk': self.news.id}),
            {'title': 'test'},
            format='json'
        )
        self.assertEqual(
            response.data['detail'],
            'You should be the author of this content in order to modify it.'
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_news_update_with_empty_title(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.put(
            reverse(self.detail_view_name, kwargs={'pk': self.news.id}),
            {'title': ''},
            format='json'
        )
        self.assertEqual(response.data['title'], ['This field may not be blank.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_update_with_too_short_title(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.put(
            reverse(self.detail_view_name, kwargs={'pk': self.news.id}),
            {'title': 'ab'},
            format='json'
        )
        self.assertEqual(response.data['title'], ['Ensure this field has at least 3 characters.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_update_with_too_long_title(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.put(
            reverse(self.detail_view_name, kwargs={'pk': self.news.id}),
            {'title': 'title' * 500},
            format='json'
        )
        self.assertEqual(
            response.data['title'], ['Ensure this field has no more than 100 characters.']
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_update_with_empty_content(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.put(
            reverse(self.detail_view_name, kwargs={'pk': self.news.id}),
            {'content': ''},
            format='json'
        )
        self.assertEqual(response.data['content'], ['This field may not be blank.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_update_with_too_short_content(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.put(
            reverse(self.detail_view_name, kwargs={'pk': self.news.id}),
            {'content': 'abc'},
            format='json'
        )
        self.assertEqual(response.data['content'], ['Ensure this field has at least 5 characters.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_update_with_too_long_content(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.put(
            reverse(self.detail_view_name, kwargs={'pk': self.news.id}),
            {'content': 'abc' * 10000},
            format='json'
        )
        self.assertEqual(
            response.data['content'], ['Ensure this field has no more than 10000 characters.']
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_update_with_valid_data(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.put(
            reverse(self.detail_view_name, kwargs={'pk': self.news.id}),
            {'title': 'new test title', 'content': 'new test content'},
            format='json'
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_news_deletion_of_another_user(self):
        self.client.force_authenticate(user=self.user)
        # Same author-reassignment setup as the update test above.
        new_user = User.objects.create(username='test2', password='pass')
        Student.objects.create(user=new_user, clazz=self.clazz)
        self.news.author = new_user
        self.news.save()
        response = self.client.delete(reverse(self.detail_view_name, kwargs={'pk': self.news.id}))
        self.assertEqual(
            response.data['detail'],
            'You should be the author of this content in order to modify it.'
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_news_deletion_with_invalid_id(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.delete(
            reverse(self.detail_view_name, kwargs={'pk': self.news.id + 1})
        )
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_news_deletion_with_valid_id(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.delete(reverse(self.detail_view_name, kwargs={'pk': self.news.id}))
        self.assertEqual(News.objects.count(), 0)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
class NewsTeachersViewSetTestCase(APITestCase):
def setUp(self):
self.client = APIClient()
self.list_view_name = 'news:teachers_news-list'
self.detail_view_name = 'news:teachers_news-detail'
self.user = User(username='test', email='sisko@gmail.com')
self.user.set_password('password123')
self.user.save()
self.clazz = Class.objects.create(number=10, letter='A')
self.subject = Subject.objects.create(title='Maths')
self.student = Teacher.objects.create(user=self.user, subject=self.subject)
self.news = News.objects.create(
title='test news title',
content='test news content',
class_number=self.clazz.number,
class_letter=self.clazz.letter,
author=self.user,
)
self.comment = Comment.objects.create(
news=self.news,
author=self.user,
content='test comment content'
)
def test_news_list_with_anonymous_user(self):
response = self.client.get(
reverse(
self.list_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter
}
)
)
self.assertEqual(response.data['detail'], 'Authentication credentials were not provided.')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_news_detail_with_anonymous_user(self):
response = self.client.get(
reverse(
self.detail_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter,
'pk': self.news.id
}
)
)
self.assertEqual(response.data['detail'], 'Authentication credentials were not provided.')
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_news_list_with_authenticated_user(self):
self.client.force_authenticate(user=self.user)
response = self.client.get(
reverse(
self.list_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter
}
)
)
self.assertIsNotNone(response.data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_news_detail_with_authenticated_user(self):
self.client.force_authenticate(user=self.user)
response = self.client.get(
reverse(
self.detail_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter,
'pk': self.news.id
}
)
)
self.assertIsNotNone(response.data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_news_list_with_student_account(self):
student_user = User.objects.create(username='teacher', password='123456')
student = Student.objects.create(user=student_user, clazz=self.clazz)
self.client.force_authenticate(user=student_user)
response = self.client.get(
reverse(
self.list_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter,
}
)
)
self.assertEqual(
response.data['detail'], 'Only teachers are allowed to view and modify this content.'
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_news_list_with_same_class(self):
self.client.force_authenticate(user=self.user)
response = self.client.get(
reverse(
self.list_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter
}
)
)
results = response.data['results']
self.assertEqual(results[0]['title'], self.news.title)
self.assertEqual(results[0]['content'], self.news.content)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_news_list_with_different_class(self):
self.client.force_authenticate(user=self.user)
response = self.client.get(
reverse(
self.list_view_name,
kwargs={
'class_number': 9,
'class_letter': 'V'
}
)
)
self.assertEqual(response.data['results'], [])
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_news_creation_with_empty_title(self):
self.client.force_authenticate(user=self.user)
self.news.title = ''
post_data = NewsSerializer(self.news).data
response = self.client.post(
reverse(
self.list_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter
}
),
post_data,
format='json'
)
self.assertEqual(response.data['title'], ['This field may not be blank.'])
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_news_creation_with_too_short_title(self):
self.client.force_authenticate(user=self.user)
self.news.title = 'ab'
post_data = NewsSerializer(self.news).data
response = self.client.post(
reverse(
self.list_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter
}
),
post_data,
format='json'
)
self.assertEqual(response.data['title'], ['Ensure this field has at least 3 characters.'])
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_news_creation_with_too_long_title(self):
self.client.force_authenticate(user=self.user)
self.news.title = 'new title' * 120
post_data = NewsSerializer(self.news).data
response = self.client.post(
reverse(
self.list_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter
}
),
post_data,
format='json'
)
self.assertEqual(
response.data['title'], ['Ensure this field has no more than 100 characters.']
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_news_creation_with_empty_content(self):
self.client.force_authenticate(user=self.user)
self.news.content = ''
post_data = NewsSerializer(self.news).data
response = self.client.post(
reverse(
self.list_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter
}
),
post_data,
format='json'
)
self.assertEqual(response.data['content'], ['This field may not be blank.'])
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_news_creation_with_too_short_content(self):
self.client.force_authenticate(user=self.user)
self.news.content = 'abc'
post_data = NewsSerializer(self.news).data
response = self.client.post(
reverse(
self.list_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter
}
),
post_data,
format='json'
)
self.assertEqual(response.data['content'], ['Ensure this field has at least 5 characters.'])
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_news_creation_with_too_long_content(self):
self.client.force_authenticate(user=self.user)
self.news.content = 'content' * 10000
post_data = NewsSerializer(self.news).data
response = self.client.post(
reverse(
self.list_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter
}
),
post_data,
format='json'
)
self.assertEqual(
response.data['content'], ['Ensure this field has no more than 10000 characters.']
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_news_creation_with_valid_data(self):
self.client.force_authenticate(user=self.user)
self.news.title = 'new title'
self.news.content = 'content'
post_data = NewsSerializer(self.news).data
response = self.client.post(
reverse(
self.list_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter
}
),
post_data,
format='json'
)
self.assertEqual(response.data['title'], self.news.title)
self.assertEqual(response.data['content'], self.news.content)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_news_detail_with_invalid_id(self):
self.client.force_authenticate(user=self.user)
response = self.client.get(
reverse(
self.detail_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter,
'pk': self.news.id + 1
}
)
)
self.assertEqual(response.data['detail'], 'Not found.')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_news_detail_with_valid_id(self):
self.client.force_authenticate(user=self.user)
response = self.client.get(
reverse(
self.detail_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter,
'pk': self.news.id
}
)
)
self.assertEqual(response.data['id'], self.news.id)
self.assertEqual(response.data['title'], self.news.title)
self.assertEqual(response.data['content'], self.news.content)
comments_data = response.data['comments']
self.assertEqual(comments_data[0]['content'], self.comment.content)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_news_update_with_empty_title(self):
self.client.force_authenticate(user=self.user)
response = self.client.put(
reverse(
self.detail_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter,
'pk': self.news.id
}
),
{'title': ''},
format='json'
)
self.assertEqual(response.data['title'], ['This field may not be blank.'])
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_news_update_with_too_short_title(self):
self.client.force_authenticate(user=self.user)
response = self.client.put(
reverse(
self.detail_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter,
'pk': self.news.id
}
),
{'title': 'ab'},
format='json'
)
self.assertEqual(response.data['title'], ['Ensure this field has at least 3 characters.'])
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_news_update_with_too_long_title(self):
self.client.force_authenticate(user=self.user)
response = self.client.put(
reverse(
self.detail_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter,
'pk': self.news.id
}
),
{'title': 'new title' * 100},
format='json'
)
self.assertEqual(
response.data['title'], ['Ensure this field has no more than 100 characters.']
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_news_update_with_empty_content(self):
self.client.force_authenticate(user=self.user)
response = self.client.put(
reverse(
self.detail_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter,
'pk': self.news.id
}
),
{'content': ''},
format='json'
)
self.assertEqual(response.data['content'], ['This field may not be blank.'])
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_news_update_with_too_short_content(self):
self.client.force_authenticate(user=self.user)
response = self.client.put(
reverse(
self.detail_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter,
'pk': self.news.id
}
),
{'content': 'ab'},
format='json'
)
self.assertEqual(response.data['content'], ['Ensure this field has at least 5 characters.'])
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_news_update_with_too_long_content(self):
self.client.force_authenticate(user=self.user)
response = self.client.put(
reverse(
self.detail_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter,
'pk': self.news.id
}
),
{'content': 'new title' * 5000},
format='json'
)
self.assertEqual(
response.data['content'], ['Ensure this field has no more than 10000 characters.']
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_news_update_with_valid_data(self):
self.client.force_authenticate(user=self.user)
response = self.client.put(
reverse(
self.detail_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter,
'pk': self.news.id
}
),
{'title': 'new title', 'content': 'new content'},
format='json'
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_news_deletion_with_invalid_id(self):
self.client.force_authenticate(user=self.user)
response = self.client.delete(
reverse(
self.detail_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter,
'pk': self.news.id + 2
}
)
)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_news_deletion_with_valid_id(self):
self.client.force_authenticate(user=self.user)
response = self.client.delete(
reverse(
self.detail_view_name,
kwargs={
'class_number': self.clazz.number,
'class_letter': self.clazz.letter,
'pk': self.news.id
}
)
)
self.assertEqual(News.objects.count(), 0)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
class NewsTeachersClassNumberListViewTestCase(APITestCase):
    """Tests for the teachers' news list endpoint filtered by class number."""

    def setUp(self):
        self.client = APIClient()
        self.view_name = 'news:teachers_class_number_list'

        self.user1 = User(username='test', email='sisko@gmail.com')
        self.user1.set_password('password123')
        self.user1.save()

        self.user2 = User(username='arigato', email='wen@gmail.com')
        self.user2.set_password('qwerty')
        self.user2.save()

        self.user3 = User(username='teacher', email='teach@gmail.com')
        self.user3.set_password('qwerty')
        self.user3.save()

        self.subject = Subject.objects.create(title='Maths')
        self.teacher = Teacher.objects.create(user=self.user3, subject=self.subject)

        self.clazz_number = 10
        self.clazz1 = Class.objects.create(number=self.clazz_number, letter='A')
        self.clazz2 = Class.objects.create(number=self.clazz_number, letter='B')
        self.clazz3 = Class.objects.create(number=self.clazz_number + 1, letter='V')

        self.student1 = Student.objects.create(user=self.user1, clazz=self.clazz1)
        self.student2 = Student.objects.create(user=self.user2, clazz=self.clazz2)

        self.news1 = News.objects.create(
            title='test news title 1',
            content='test news content 1',
            class_number=self.clazz1.number,
            class_letter=self.clazz1.letter,
            author=self.user1,
        )
        self.news2 = News.objects.create(
            title='test news title 2',
            content='test news content 2',
            class_number=self.clazz2.number,
            class_letter=self.clazz2.letter,
            author=self.user2,
        )

    def _url(self):
        # URL of the class-number list endpoint under test.
        return reverse(self.view_name, kwargs={'class_number': self.clazz_number})

    def _post(self, news):
        # Serialize *news* and POST it to the list endpoint.
        return self.client.post(self._url(), NewsSerializer(news).data, format='json')

    def test_news_list_with_anonymous_user(self):
        response = self.client.get(self._url())
        self.assertEqual(response.data['detail'], 'Authentication credentials were not provided.')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

    def test_news_list_with_authenticated_user(self):
        self.client.force_authenticate(user=self.user3)
        response = self.client.get(self._url())
        self.assertIsNotNone(response.data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_news_list_with_student_account(self):
        self.client.force_authenticate(user=self.user1)
        response = self.client.get(self._url())
        self.assertEqual(
            response.data['detail'], 'Only teachers are allowed to view and modify this content.'
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_news_creation_with_empty_title(self):
        self.client.force_authenticate(user=self.user3)
        self.news1.title = ''
        response = self._post(self.news1)
        self.assertEqual(response.data['title'], ['This field may not be blank.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_creation_with_too_short_title(self):
        self.client.force_authenticate(user=self.user3)
        self.news1.title = 'ab'
        response = self._post(self.news1)
        self.assertEqual(
            response.data['title'], ['Ensure this field has at least 3 characters.']
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_creation_with_too_long_title(self):
        self.client.force_authenticate(user=self.user3)
        self.news1.title = 'test' * 50
        response = self._post(self.news1)
        self.assertEqual(
            response.data['title'], ['Ensure this field has no more than 100 characters.']
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_creation_with_empty_content(self):
        self.client.force_authenticate(user=self.user3)
        self.news1.content = ''
        response = self._post(self.news1)
        self.assertEqual(response.data['content'], ['This field may not be blank.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_creation_with_too_short_content(self):
        self.client.force_authenticate(user=self.user3)
        self.news1.content = 'abc'
        response = self._post(self.news1)
        self.assertEqual(response.data['content'], ['Ensure this field has at least 5 characters.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_creation_with_too_long_content(self):
        self.client.force_authenticate(user=self.user3)
        self.news1.content = 'abc' * 10000
        response = self._post(self.news1)
        self.assertEqual(
            response.data['content'], ['Ensure this field has no more than 10000 characters.']
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_news_creation_with_valid_data(self):
        self.client.force_authenticate(user=self.user3)
        self.news1.title = 'new title'
        self.news1.content = 'new content'
        response = self._post(self.news1)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
class NewsStudentsCommentsViewSetTestCase(APITestCase):
    """Tests for students' news-comment create/update/delete endpoints."""

    def setUp(self):
        self.client = APIClient()
        self.list_view_name = 'news:students_news_comments-list'
        self.detail_view_name = 'news:students_news_comments-detail'

        self.user = User.objects.create(username='test1', password='pass')
        self.clazz = Class.objects.create(number=10, letter='A')
        self.student = Student.objects.create(user=self.user, clazz=self.clazz)
        self.news = News.objects.create(
            title='test news title',
            content='test news content',
            class_number=self.clazz.number,
            class_letter=self.clazz.letter,
            author=self.user,
        )
        self.comment = Comment.objects.create(
            news=self.news,
            author=self.user,
            content='test comment content'
        )

    def _list_url(self):
        # Comments-list URL for the fixture news entry.
        return reverse(self.list_view_name, kwargs={'students_news_pk': self.news.id})

    def _detail_url(self, news_pk=None, comment_pk=None):
        # Comment-detail URL; defaults to the fixture news/comment ids.
        return reverse(
            self.detail_view_name,
            kwargs={
                'students_news_pk': self.news.id if news_pk is None else news_pk,
                'pk': self.comment.id if comment_pk is None else comment_pk,
            }
        )

    def test_comment_creation_with_empty_content(self):
        self.client.force_authenticate(user=self.user)
        self.comment.content = ''
        response = self.client.post(
            self._list_url(), CommentSerializer(self.comment).data, format='json'
        )
        self.assertEqual(response.data['content'], ['This field may not be blank.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_comment_creation_with_too_long_content(self):
        self.client.force_authenticate(user=self.user)
        self.comment.content = 'content' * 1024
        response = self.client.post(
            self._list_url(), CommentSerializer(self.comment).data, format='json'
        )
        self.assertEqual(
            response.data['content'], ['Ensure this field has no more than 2048 characters.']
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_comment_creation_with_valid_content(self):
        self.client.force_authenticate(user=self.user)
        self.comment.content = 'new content'
        response = self.client.post(
            self._list_url(), CommentSerializer(self.comment).data, format='json'
        )
        self.assertEqual(response.data['content'], self.comment.content)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_comment_update_with_empty_content(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.put(self._detail_url(), {'content': ''}, format='json')
        self.assertEqual(response.data['content'], ['This field may not be blank.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_comment_update_of_another_user(self):
        self.client.force_authenticate(user=self.user)
        other_user = User.objects.create(username='test2', password='pass')
        Student.objects.create(user=other_user, clazz=self.clazz)
        # Reassign the comment so the authenticated user is no longer its author.
        self.comment.author = other_user
        self.comment.save()
        response = self.client.put(
            self._detail_url(), {'content': 'new test comment content'}, format='json'
        )
        self.assertEqual(
            response.data['detail'],
            'You should be the author of this content in order to modify it.'
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_comment_update_with_too_long_content(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.put(
            self._detail_url(), {'content': 'title' * 1024}, format='json'
        )
        self.assertEqual(
            response.data['content'], ['Ensure this field has no more than 2048 characters.']
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_comment_update_with_valid_content(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.put(
            self._detail_url(), {'content': 'updated content'}, format='json'
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_comment_deletion_with_invalid_news_id(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.delete(self._detail_url(news_pk=self.news.id + 1))
        self.assertEqual(response.data['detail'], 'Not found.')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_comment_deletion_with_invalid_comment_id(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.delete(self._detail_url(comment_pk=self.comment.id + 1))
        self.assertEqual(response.data['detail'], 'Not found.')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_comment_deletion_of_another_user(self):
        self.client.force_authenticate(user=self.user)
        other_user = User.objects.create(username='test3', password='pass')
        Student.objects.create(user=other_user, clazz=self.clazz)
        self.comment.author = other_user
        self.comment.save()
        response = self.client.delete(self._detail_url())
        self.assertEqual(
            response.data['detail'],
            'You should be the author of this content in order to modify it.'
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_comment_deletion_with_valid_ids(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.delete(self._detail_url())
        self.assertEqual(Comment.objects.count(), 0)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
class NewsTeachersCommentsViewSetTestCase(APITestCase):
    """Tests for teachers' news-comment create/update/delete endpoints."""

    def setUp(self):
        self.client = APIClient()
        self.list_view_name = 'news:teachers_news_comments-list'
        self.detail_view_name = 'news:teachers_news_comments-detail'

        self.user = User.objects.create(username='test1', password='pass')
        self.clazz = Class.objects.create(number=10, letter='A')
        self.subject = Subject.objects.create(title='Literature')
        self.teacher = Teacher.objects.create(user=self.user, subject=self.subject)
        self.news = News.objects.create(
            title='test news title',
            content='test news content',
            class_number=self.clazz.number,
            class_letter=self.clazz.letter,
            author=self.user,
        )
        self.comment = Comment.objects.create(
            news=self.news,
            author=self.user,
            content='test comment content'
        )

    def _list_url(self):
        # Comments-list URL for the fixture class/news pair.
        return reverse(
            self.list_view_name,
            kwargs={
                'class_number': self.clazz.number,
                'class_letter': self.clazz.letter,
                'teachers_news_pk': self.news.id,
            }
        )

    def _detail_url(self, news_pk=None, comment_pk=None):
        # Comment-detail URL; defaults to the fixture news/comment ids.
        return reverse(
            self.detail_view_name,
            kwargs={
                'class_number': self.clazz.number,
                'class_letter': self.clazz.letter,
                'teachers_news_pk': self.news.id if news_pk is None else news_pk,
                'pk': self.comment.id if comment_pk is None else comment_pk,
            }
        )

    def test_comment_creation_with_empty_content(self):
        self.client.force_authenticate(user=self.user)
        self.comment.content = ''
        response = self.client.post(
            self._list_url(), CommentSerializer(self.comment).data, format='json'
        )
        self.assertEqual(response.data['content'], ['This field may not be blank.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_comment_creation_with_too_long_content(self):
        self.client.force_authenticate(user=self.user)
        self.comment.content = 'content' * 1024
        response = self.client.post(
            self._list_url(), CommentSerializer(self.comment).data, format='json'
        )
        self.assertEqual(
            response.data['content'], ['Ensure this field has no more than 2048 characters.']
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_comment_creation_with_valid_content(self):
        self.client.force_authenticate(user=self.user)
        self.comment.content = 'This is a very nice platorm, man!'
        response = self.client.post(
            self._list_url(), CommentSerializer(self.comment).data, format='json'
        )
        self.assertEqual(response.data['content'], self.comment.content)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_comment_update_with_empty_content(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.put(self._detail_url(), {'content': ''}, format='json')
        self.assertEqual(response.data['content'], ['This field may not be blank.'])
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_comment_update_of_another_user(self):
        self.client.force_authenticate(user=self.user)
        other_user = User.objects.create(username='test2', password='pass')
        Teacher.objects.create(user=other_user, subject=self.subject)
        # Reassign the comment so the authenticated user is no longer its author.
        self.comment.author = other_user
        self.comment.save()
        response = self.client.put(
            self._detail_url(), {'content': 'new test content'}, format='json'
        )
        self.assertEqual(
            response.data['detail'],
            'You should be the author of this content in order to modify it.'
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_comment_update_with_too_long_content(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.put(
            self._detail_url(), {'content': 'new test content' * 1024}, format='json'
        )
        self.assertEqual(
            response.data['content'], ['Ensure this field has no more than 2048 characters.']
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_comment_update_with_valid_content(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.put(
            self._detail_url(), {'content': 'new test content'}, format='json'
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_comment_deletion_with_invalid_news_id(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.delete(self._detail_url(news_pk=self.news.id + 1))
        self.assertEqual(response.data['detail'], 'Not found.')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_comment_deletion_with_invalid_comment_id(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.delete(self._detail_url(comment_pk=self.comment.id + 5))
        self.assertEqual(response.data['detail'], 'Not found.')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_comment_deletion_of_another_user(self):
        self.client.force_authenticate(user=self.user)
        other_user = User.objects.create(username='test3', password='pass')
        Teacher.objects.create(user=other_user, subject=self.subject)
        self.comment.author = other_user
        self.comment.save()
        response = self.client.delete(self._detail_url())
        self.assertEqual(
            response.data['detail'],
            'You should be the author of this content in order to modify it.'
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_comment_deletion_with_valid_ids(self):
        self.client.force_authenticate(user=self.user)
        response = self.client.delete(self._detail_url())
        self.assertEqual(Comment.objects.count(), 0)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
| 36.136147
| 100
| 0.60587
| 5,933
| 53,084
| 5.207821
| 0.03135
| 0.054372
| 0.116124
| 0.07884
| 0.957376
| 0.949447
| 0.947958
| 0.941744
| 0.923199
| 0.922099
| 0
| 0.012895
| 0.287111
| 53,084
| 1,468
| 101
| 36.160763
| 0.803583
| 0
| 0
| 0.781541
| 0
| 0
| 0.109638
| 0.004879
| 0
| 0
| 0
| 0
| 0.144793
| 1
| 0.07536
| false
| 0.012701
| 0.005927
| 0
| 0.085521
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95bdcb0b6a3b61668bb0119c962afa2853f2de50
| 172,713
|
py
|
Python
|
tests/test_docker.py
|
theoden-dd/fabricio
|
ff09968a58bf6f98966e24bb2fbda5aa598f9412
|
[
"MIT"
] | 291
|
2016-04-02T09:25:05.000Z
|
2022-01-07T04:54:25.000Z
|
tests/test_docker.py
|
theoden-dd/fabricio
|
ff09968a58bf6f98966e24bb2fbda5aa598f9412
|
[
"MIT"
] | 90
|
2016-04-02T09:38:00.000Z
|
2021-06-04T09:39:59.000Z
|
tests/test_docker.py
|
theoden-dd/fabricio
|
ff09968a58bf6f98966e24bb2fbda5aa598f9412
|
[
"MIT"
] | 37
|
2016-07-09T00:25:37.000Z
|
2021-06-03T15:16:52.000Z
|
# coding: utf-8
import json
import shlex
from collections import OrderedDict
import mock
import unittest2 as unittest
from fabric import api as fab
import fabricio
from fabricio import docker
from fabricio.docker.container import Option, Attribute
from tests import SucceededResult, docker_run_args_parser, \
docker_service_update_args_parser, \
docker_entity_inspect_args_parser, docker_inspect_args_parser, \
docker_service_create_args_parser, args_parser, Command, FailedResult
# Minimal concrete Container subclass used as a fixture throughout these tests.
class TestContainer(docker.Container):
    # Fixed image reference so tests don't depend on external registries.
    image = docker.Image('image:tag')
class ContainerTestCase(unittest.TestCase):
maxDiff = None
def test_options(self):
cases = dict(
default=dict(
kwargs=dict(),
expected={},
),
custom=dict(
kwargs=dict(options=dict(foo='bar')),
expected={
'foo': 'bar',
},
),
collision=dict(
kwargs=dict(options=dict(execute='execute')),
expected={
'execute': 'execute',
},
),
override=dict(
kwargs=dict(options=dict(env='custom_env')),
expected={
'env': 'custom_env',
},
),
complex=dict(
kwargs=dict(options=dict(
env='custom_env',
user=lambda service: 'user',
foo='foo',
bar=lambda service: 'bar',
)),
expected={
'env': 'custom_env',
'user': 'user',
'foo': 'foo',
'bar': 'bar',
},
),
)
for case, data in cases.items():
with self.subTest(case=case):
container = TestContainer(**data['kwargs'])
self.assertDictEqual(data['expected'], dict(container.options))
def test_safe_options(self):
class TestService(docker.BaseService):
option = docker.Option()
safe_option = docker.Option(safe=True, name='safe-option')
safe_overridden = docker.Option(safe=True)
another_safe_option = docker.Option(safe_name='another-safe-option')
service = TestService(
options=dict(option=42, safe_option=42, another_safe_option=42, safe_overridden=42),
safe_options=dict(foo='bar', option='hello', dyn=lambda s: 'dyn', safe_overridden='override'),
)
self.assertDictEqual(
{
'safe-option': 42,
'another-safe-option': 42,
'foo': 'bar',
'option': 'hello',
'dyn': 'dyn',
'safe_overridden': 'override',
},
dict(service.safe_options),
)
    def test_options_inheritance(self):
        """Option descriptors are inherited, overridable, renameable, and writable."""
        class Parent(docker.Container):
            user = 'user'  # overridden property (simple)

            @property  # overridden property (dynamic)
            def publish(self):
                return 'ports'

            baz = Option(default=42)  # new property

            @Option  # new dynamic property
            def foo(self):
                return 'bar'

            @Option()  # new dynamic property
            def foo2(self):
                return 'bar2'

            @Option(default='not_used')  # new dynamic property
            def foo3(self):
                return 'bar3'

            null = Option()  # new empty property

            @Option(name='real-name')
            def alias(self):
                return 'value'

            @Option(name='real-name2')
            def overridden_alias(self):
                return 'value'

            @Option(name='real-name3')
            def overridden_alias2(self):
                return 'value'

        class Child(Parent):
            overridden_alias = 'overridden_value'

            @Option(name='overridden-name')
            def overridden_alias2(self):
                return 'overridden_value'

        container = Child()

        # simple attribute override is exposed as an option and stays writable
        self.assertIn('user', container.options)
        self.assertEqual(container.options['user'], 'user')
        container.user = 'fabricio'
        self.assertEqual(container.options['user'], 'fabricio')

        self.assertIn('publish', container.options)
        self.assertEqual(container.options['publish'], 'ports')

        self.assertIn('baz', container.options)
        self.assertEqual(container.options['baz'], 42)
        container.baz = 101
        self.assertEqual(container.options['baz'], 101)

        self.assertIn('foo', container.options)
        self.assertEqual(container.options['foo'], 'bar')
        container.foo = 'baz'
        self.assertEqual(container.options['foo'], 'baz')

        self.assertIn('foo2', container.options)
        self.assertEqual(container.options['foo2'], 'bar2')
        container.foo2 = 'baz2'
        self.assertEqual(container.options['foo2'], 'baz2')

        self.assertIn('foo3', container.options)
        self.assertEqual(container.options['foo3'], 'bar3')
        container.foo3 = 'baz3'
        self.assertEqual(container.options['foo3'], 'baz3')

        # options registered with an explicit name= appear under that name
        self.assertIn('real-name', container.options)
        self.assertEqual(container.options['real-name'], 'value')
        container.alias = 'another_value'
        self.assertEqual(container.options['real-name'], 'another_value')

        self.assertIn('real-name2', container.options)
        self.assertEqual(container.options['real-name2'], 'overridden_value')
        container.overridden_alias = 'another_value'
        self.assertEqual(container.options['real-name2'], 'another_value')

        self.assertIn('overridden-name', container.options)
        self.assertEqual(container.options['overridden-name'], 'overridden_value')
        container.overridden_alias2 = 'another_value'
        self.assertEqual(container.options['overridden-name'], 'another_value')

        # an empty Option is absent until a value is assigned
        self.assertNotIn('null', container.options)
        container.null = 'value'
        self.assertIn('null', container.options)
        self.assertEqual(container.options['null'], 'value')
    def test_attributes_inheritance(self):
        """Attribute descriptors are inherited, overridable, and writable like Options."""
        class Container(docker.Container):
            command = 'command'  # overridden property (simple)

            @property  # overridden property (dynamic)
            def stop_timeout(self):
                return 1001

            baz = Attribute(default=42)  # new property

            @Attribute  # new dynamic property
            def foo(self):
                return 'bar'

            @Attribute()  # new dynamic property
            def foo2(self):
                return 'bar2'

            @Attribute(default='not_used')  # new dynamic property
            def foo3(self):
                return 'bar3'

            null = Attribute()  # new empty property

        container = Container()

        # simple override stays writable
        self.assertEqual(container.command, 'command')
        container.command = 'command2'
        self.assertEqual(container.command, 'command2')

        self.assertEqual(container.stop_timeout, 1001)

        self.assertEqual(container.baz, 42)
        container.baz = 101
        self.assertEqual(container.baz, 101)

        self.assertEqual(container.foo, 'bar')
        container.foo = 'baz'
        self.assertEqual(container.foo, 'baz')

        self.assertEqual(container.foo2, 'bar2')
        container.foo2 = 'baz2'
        self.assertEqual(container.foo2, 'baz2')

        self.assertEqual(container.foo3, 'bar3')
        container.foo3 = 'baz3'
        self.assertEqual(container.foo3, 'baz3')

        # an empty Attribute reads as None until assigned
        self.assertIsNone(container.null)
        container.null = 'value'
        self.assertEqual(container.null, 'value')
def test_container_does_not_allow_modify_options(self):
container = TestContainer()
# default options allowed to be modified
container.user = 'user'
self.assertEqual('user', container.user)
# do not allow to modify additional options
with self.assertRaises(TypeError):
container.options['some-option'] = 'value'
    def test_container_raises_error_on_unknown_attr(self):
        """Constructing a container with an undeclared attribute raises TypeError."""
        with self.assertRaises(TypeError):
            docker.Container(name='name', unknown_attr='foo')
def test_info(self):
return_value = SucceededResult('[{"Id": "123", "Image": "abc"}]')
expected = dict(Id='123', Image='abc')
container = docker.Container(name='name')
expected_command = 'docker inspect --type container name'
with mock.patch.object(
fabricio,
'run',
return_value=return_value,
) as run:
self.assertEqual(expected, container.info)
run.assert_called_once_with(
expected_command,
abort_exception=docker.ContainerNotFoundError,
)
def test_delete(self):
    """Check the docker commands emitted by Container.delete() variants.

    Each case maps delete() keyword arguments to the exact sequence of
    `docker rm`/`docker rmi` (and preparatory `docker inspect`) calls.
    """
    cases = dict(
        regular=dict(
            delete_kwargs=dict(),
            expected_commands=[
                mock.call('docker rm --volumes name'),
            ],
        ),
        with_image=dict(
            # deleting the image first inspects the container to learn its image id
            delete_kwargs=dict(delete_image=True),
            expected_commands=[
                mock.call('docker inspect --type container name', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker rm --volumes name'),
                mock.call('docker rmi image_id', ignore_errors=True),
            ],
        ),
        forced=dict(
            delete_kwargs=dict(force=True),
            expected_commands=[
                mock.call('docker rm --force --volumes name'),
            ],
        ),
        no_dangling_removal_deprecated=dict(
            # case name suggests delete_dangling_volumes is a deprecated
            # spelling with the same effect as volumes=False below
            delete_kwargs=dict(delete_dangling_volumes=False),
            expected_commands=[
                mock.call('docker rm name'),
            ],
        ),
        no_volumes=dict(
            delete_kwargs=dict(volumes=False),
            expected_commands=[
                mock.call('docker rm name'),
            ],
        ),
        complex=dict(
            delete_kwargs=dict(force=True, delete_image=True),
            expected_commands=[
                mock.call('docker inspect --type container name', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker rm --force --volumes name'),
                mock.call('docker rmi image_id', ignore_errors=True),
            ],
        ),
    )
    for case, params in cases.items():
        with self.subTest(case=case):
            container = docker.Container(name='name')
            with mock.patch.object(
                fabricio,
                'run',
                # single canned inspect result is enough for every case
                return_value=SucceededResult('[{"Image": "image_id"}]'),
            ) as run:
                expected_commands = params['expected_commands']
                delete_kwargs = params['delete_kwargs']
                container.delete(**delete_kwargs)
                self.assertListEqual(run.mock_calls, expected_commands)
def test_execute(self):
    """execute() wraps the command in `docker exec` and returns its output."""
    container = docker.Container(name='name')
    with mock.patch.object(
        fabricio,
        'run',
        return_value='result',
    ) as run:
        self.assertEqual('result', container.execute('command'))
    run.assert_called_once_with(
        'docker exec --tty --interactive name command',
        quiet=True,
        use_cache=False,
    )
def test_start(self):
    """start() issues a plain `docker start <name>`."""
    container = docker.Container(name='name')
    with mock.patch.object(fabricio, 'run') as run:
        container.start()
    run.assert_called_once_with('docker start name')
def test_stop(self):
    """stop() forwards the timeout, defaulting to 10 seconds when None."""
    cases = [
        # (label, timeout argument, expected docker command)
        ('default', None, 'docker stop --time 10 name'),
        ('positive_timeout', 30, 'docker stop --time 30 name'),
        ('zero_timeout', 0, 'docker stop --time 0 name'),
    ]
    for label, timeout, expected_command in cases:
        with self.subTest(case=label):
            container = docker.Container(name='name')
            with mock.patch.object(fabricio, 'run') as run:
                container.stop(timeout=timeout)
            run.assert_called_once_with(expected_command)
def test_reload(self):
    """reload() restarts the container, defaulting the timeout to 10 seconds."""
    cases = [
        # (label, timeout argument, expected docker command)
        ('default', None, 'docker restart --time 10 name'),
        ('positive_timeout', 30, 'docker restart --time 30 name'),
        ('zero_timeout', 0, 'docker restart --time 0 name'),
    ]
    for label, timeout, expected_command in cases:
        with self.subTest(case=label):
            container = docker.Container(name='name')
            with mock.patch.object(fabricio, 'run') as run:
                container.reload(timeout=timeout)
            run.assert_called_once_with(expected_command)
def test_rename(self):
    """rename() invokes `docker rename` and updates the local name too."""
    container = docker.Container(name='name')
    with mock.patch.object(fabricio, 'run') as run:
        container.rename('new_name')
    run.assert_called_once_with('docker rename name new_name')
    self.assertEqual('new_name', container.name)
def test_signal(self):
    """signal() delivers the given signal via `docker kill --signal`."""
    container = docker.Container(name='name')
    with mock.patch.object(fabricio, 'run') as run:
        container.signal('SIGTERM')
    run.assert_called_once_with('docker kill --signal SIGTERM name')
def test_run(self):
    """Verify the `docker run` command line built by Container.run().

    Each case builds a Container subclass with class-level attributes
    (``class_kwargs``), calls run(), and parses the resulting command
    with ``docker_run_args_parser`` to compare against ``expected_args``.
    """
    cases = dict(
        basic=dict(
            init_kwargs=dict(
                name='name',
            ),
            class_kwargs=dict(image=docker.Image('image:tag')),
            # NOTE(review): 'expected_command' is not read by this test;
            # only 'expected_args' is asserted below
            expected_command='docker run --name name --detach image:tag ',
            expected_args={
                'executable': ['docker'],
                'run_or_create': ['run'],
                'name': 'name',
                'detach': True,
                'image': 'image:tag',
                'command': [],
            },
        ),
        complex=dict(
            init_kwargs=dict(
                name='name',
                options={
                    'custom-option': 'foo',
                    # instance option takes precedence over the class attr
                    'restart': 'override',
                },
            ),
            class_kwargs=dict(
                image=docker.Image('image:tag'),
                command='command',
                user='user',
                publish=['80:80', '443:443'],
                env=['FOO=foo', 'BAR=bar'],
                volume=['/tmp:/tmp', '/root:/root:ro'],
                link=['db:db'],
                add_host=['host:192.168.0.1'],
                network='network',
                restart='restart_policy',
                stop_signal='stop_signal',
            ),
            expected_args={
                'executable': ['docker'],
                'run_or_create': ['run'],
                'user': 'user',
                'publish': ['80:80', '443:443'],
                'env': ['FOO=foo', 'BAR=bar'],
                'volume': ['/tmp:/tmp', '/root:/root:ro'],
                'link': ['db:db'],
                'add-host': ['host:192.168.0.1'],
                'net': 'network',
                'restart': 'override',
                'stop-signal': 'stop_signal',
                'name': 'name',
                'detach': True,
                'custom-option': 'foo',
                'image': 'image:tag',
                'command': ['command'],
            },
        ),
    )
    def test_command(command, *args, **kwargs):
        # NOTE: this closure reads `params` from the for-loop below; it is
        # only invoked (via the mocked fabricio.run) while the loop variable
        # is bound to the current case, so the late binding is intentional.
        options = docker_run_args_parser.parse_args(shlex.split(command))
        self.assertDictEqual(vars(options), params['expected_args'])
    for case, params in cases.items():
        with self.subTest(case=case):
            init_kwargs = params['init_kwargs']
            class_kwargs = params['class_kwargs']
            # build the subclass through Container's metaclass so the
            # class-level attributes are processed as container options
            Container = type(docker.Container)(
                'Container',
                (docker.Container, ),
                class_kwargs,
            )
            container = Container(**init_kwargs)
            with mock.patch.object(fabricio, 'run', side_effect=test_command):
                container.run()
def test_fork(self):
    """fork() clones a container, optionally overriding name/image/command/options.

    Overridden options are merged into the source container's options
    (see predefined_override_custom_option: 'foo' replaced, 'user' kept);
    all other attributes fall back to the source container's values.
    """
    cases = dict(
        default=dict(
            init_kwargs=dict(name='name'),
            fork_kwargs=dict(),
            expected_properties=dict(
                name='name',
                command=None,
                options={},
            ),
        ),
        predefined_default=dict(
            init_kwargs=dict(
                name='name',
                options=dict(user='fabricio', foo='baz'),
                image='image:tag',
                command='fab',
            ),
            fork_kwargs=dict(),
            expected_properties=dict(
                name='name',
                command='fab',
                options={
                    'user': 'fabricio',
                    'foo': 'baz',
                },
            ),
            expected_image='image:tag',
        ),
        override_name=dict(
            init_kwargs=dict(name='name'),
            fork_kwargs=dict(name='another_name'),
            expected_properties=dict(
                name='another_name',
                command=None,
                options={},
            ),
        ),
        override_command=dict(
            init_kwargs=dict(name='name'),
            fork_kwargs=dict(command='command'),
            expected_properties=dict(
                name='name',
                command='command',
                options={},
            ),
        ),
        override_image_str=dict(
            init_kwargs=dict(name='name'),
            fork_kwargs=dict(image='image'),
            expected_properties=dict(
                name='name',
                command=None,
                options={},
            ),
            # a bare image name gets the ':latest' tag appended
            expected_image='image:latest',
        ),
        override_image_instance=dict(
            init_kwargs=dict(name='name'),
            fork_kwargs=dict(image=docker.Image('image')),
            expected_properties=dict(
                name='name',
                command=None,
                options={},
            ),
            expected_image='image:latest',
        ),
        override_default_option=dict(
            init_kwargs=dict(name='name'),
            fork_kwargs=dict(options=dict(user='user')),
            expected_properties=dict(
                name='name',
                command=None,
                options={
                    'user': 'user',
                },
            ),
        ),
        override_custom_option=dict(
            init_kwargs=dict(name='name'),
            fork_kwargs=dict(options=dict(foo='bar')),
            expected_properties=dict(
                name='name',
                command=None,
                options={
                    'foo': 'bar',
                },
            ),
        ),
        # NOTE(review): 'overrride' (triple 'r') is misspelled, but it is
        # only a case label so behavior is unaffected
        overrride_complex=dict(
            init_kwargs=dict(name='name'),
            fork_kwargs=dict(
                options=dict(foo='bar', user='user'),
                image='image',
                command='command',
                name='another_name',
            ),
            expected_properties=dict(
                name='another_name',
                command='command',
                options={
                    'user': 'user',
                    'foo': 'bar',
                },
            ),
            expected_image='image:latest',
        ),
        predefined_override_command=dict(
            init_kwargs=dict(
                name='name',
                options=dict(user='fabricio', foo='baz'),
                image='image:tag',
                command='fab',
            ),
            fork_kwargs=dict(command='command'),
            expected_properties=dict(
                name='name',
                command='command',
                options={
                    'user': 'fabricio',
                    'foo': 'baz',
                },
            ),
            expected_image='image:tag',
        ),
        predefined_override_image_str=dict(
            init_kwargs=dict(
                name='name',
                options=dict(user='fabricio', foo='baz'),
                image='image:tag',
                command='fab',
            ),
            fork_kwargs=dict(image='image'),
            expected_properties=dict(
                name='name',
                command='fab',
                options={
                    'user': 'fabricio',
                    'foo': 'baz',
                },
            ),
            expected_image='image:latest',
        ),
        predefined_override_image_instance=dict(
            init_kwargs=dict(
                name='name',
                options=dict(user='fabricio', foo='baz'),
                image='image:tag',
                command='fab',
            ),
            fork_kwargs=dict(image=docker.Image('image')),
            expected_properties=dict(
                name='name',
                command='fab',
                options={
                    'user': 'fabricio',
                    'foo': 'baz',
                },
            ),
            expected_image='image:latest',
        ),
        predefined_override_default_option=dict(
            init_kwargs=dict(
                name='name',
                options=dict(user='fabricio', foo='baz'),
                image='image:tag',
                command='fab',
            ),
            fork_kwargs=dict(options=dict(user='user')),
            expected_properties=dict(
                name='name',
                command='fab',
                options={
                    'user': 'user',
                    'foo': 'baz',
                },
            ),
            expected_image='image:tag',
        ),
        predefined_override_custom_option=dict(
            init_kwargs=dict(
                name='name',
                options=dict(user='fabricio', foo='baz'),
                image='image:tag',
                command='fab',
            ),
            fork_kwargs=dict(options=dict(foo='bar')),
            expected_properties=dict(
                name='name',
                command='fab',
                options={
                    'user': 'fabricio',
                    'foo': 'bar',
                },
            ),
            expected_image='image:tag',
        ),
        predefined_overrride_complex=dict(
            init_kwargs=dict(
                name='name',
                options=dict(user='fabricio', foo='baz', hello=42),
                image='image:tag',
                command='fab',
            ),
            fork_kwargs=dict(
                options=dict(foo='bar', user='user'),
                image='image',
                command='command',
                name='another_name',
            ),
            expected_properties=dict(
                name='another_name',
                command='command',
                options={
                    'user': 'user',
                    'foo': 'bar',
                    'hello': 42,
                },
            ),
            expected_image='image:latest',
        ),
    )
    for case, data in cases.items():
        with self.subTest(case=case):
            container = docker.Container(**data['init_kwargs'])
            forked_container = container.fork(**data['fork_kwargs'])
            expected_image = data.get('expected_image')
            if expected_image:
                self.assertEqual(repr(forked_container.image), expected_image)
            for prop, value in data['expected_properties'].items():
                self.assertEqual(value, getattr(forked_container, prop))
@mock.patch.object(fabricio, 'log')
def test_update(self, *args):
    """Verify the docker command sequences produced by Container.update().

    Each case feeds `fabricio.run` a fixed sequence of results
    (``side_effect``) and compares the recorded calls plus update()'s
    return value (True when the container was replaced).

    NOTE(review): 'excpected_result' is a consistent misspelling of
    'expected_result'; it is only a dict key and local variable, so
    behavior is unaffected.
    """
    cases = dict(
        no_change=dict(
            side_effect=(
                SucceededResult('[{"Image": "image_id"}]'),  # current container info
                SucceededResult('[{"Id": "image_id"}]'),  # new image info
                SucceededResult(),  # force starting container
            ),
            expected_commands=[
                mock.call('docker inspect --type container name', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker inspect --type image image:tag', abort_exception=docker.ImageNotFoundError),
                mock.call('docker start name'),
            ],
            update_kwargs=dict(),
            excpected_result=False,
        ),
        no_change_with_tag=dict(
            side_effect=(
                SucceededResult('[{"Image": "image_id"}]'),  # current container info
                SucceededResult('[{"Id": "image_id"}]'),  # new image info
                SucceededResult(),  # force starting container
            ),
            expected_commands=[
                mock.call('docker inspect --type container name', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker inspect --type image image:foo', abort_exception=docker.ImageNotFoundError),
                mock.call('docker start name'),
            ],
            update_kwargs=dict(tag='foo'),
            excpected_result=False,
        ),
        forced=dict(
            # force=True skips the image comparison entirely
            side_effect=(
                SucceededResult('[{"Image": "image_id"}]'),  # obsolete container info
                SucceededResult(),  # delete obsolete container
                SucceededResult(),  # delete obsolete container image
                SucceededResult(),  # rename current container
                SucceededResult(),  # stop current container
                SucceededResult('new_container_id'),  # run new container
            ),
            expected_commands=[
                mock.call('docker inspect --type container name_backup', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker rm --volumes name_backup'),
                mock.call('docker rmi image_id', ignore_errors=True),
                mock.call('docker rename name name_backup'),
                mock.call('docker stop --time 10 name_backup'),
                mock.call(Command(docker_run_args_parser, {
                    'executable': ['docker'],
                    'run_or_create': ['run'],
                    'detach': True,
                    'name': 'name',
                    'image': 'image:tag',
                    'command': [],
                }), quiet=True),
            ],
            update_kwargs=dict(force=True),
            excpected_result=True,
        ),
        regular=dict(
            side_effect=(
                SucceededResult('[{"Image": "image_id"}]'),  # current container info
                SucceededResult('[{"Id": "new_image_id"}]'),  # new image info
                SucceededResult('[{"Image": "old_image_id"}]'),  # obsolete container info
                SucceededResult(),  # delete obsolete container
                SucceededResult(),  # delete obsolete container image
                SucceededResult(),  # rename current container
                SucceededResult(),  # stop current container
                SucceededResult('new_container_id'),  # run new container
            ),
            expected_commands=[
                mock.call('docker inspect --type container name', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker inspect --type image image:tag', abort_exception=docker.ImageNotFoundError),
                mock.call('docker inspect --type container name_backup', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker rm --volumes name_backup'),
                mock.call('docker rmi old_image_id', ignore_errors=True),
                mock.call('docker rename name name_backup'),
                mock.call('docker stop --time 10 name_backup'),
                mock.call(Command(docker_run_args_parser, {
                    'executable': ['docker'],
                    'run_or_create': ['run'],
                    'detach': True,
                    'name': 'name',
                    'image': 'image:tag',
                    'command': [],
                }), quiet=True),
            ],
            update_kwargs=dict(),
            excpected_result=True,
        ),
        regular_with_tag=dict(
            side_effect=(
                SucceededResult('[{"Image": "image_id"}]'),  # current container info
                SucceededResult('[{"Id": "new_image_id"}]'),  # new image info
                SucceededResult('[{"Image": "old_image_id"}]'),  # obsolete container info
                SucceededResult(),  # delete obsolete container
                SucceededResult(),  # delete obsolete container image
                SucceededResult(),  # rename current container
                SucceededResult(),  # stop current container
                SucceededResult('new_container_id'),  # run new container
            ),
            expected_commands=[
                mock.call('docker inspect --type container name', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker inspect --type image image:foo', abort_exception=docker.ImageNotFoundError),
                mock.call('docker inspect --type container name_backup', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker rm --volumes name_backup'),
                mock.call('docker rmi old_image_id', ignore_errors=True),
                mock.call('docker rename name name_backup'),
                mock.call('docker stop --time 10 name_backup'),
                mock.call(Command(docker_run_args_parser, {
                    'executable': ['docker'],
                    'run_or_create': ['run'],
                    'detach': True,
                    'name': 'name',
                    'image': 'image:foo',
                    'command': [],
                }), quiet=True),
            ],
            update_kwargs=dict(tag='foo'),
            excpected_result=True,
        ),
        regular_with_registry=dict(
            side_effect=(
                SucceededResult('[{"Image": "image_id"}]'),  # current container info
                SucceededResult('[{"Id": "new_image_id"}]'),  # new image info
                SucceededResult('[{"Image": "old_image_id"}]'),  # obsolete container info
                SucceededResult(),  # delete obsolete container
                SucceededResult(),  # delete obsolete container image
                SucceededResult(),  # rename current container
                SucceededResult(),  # stop current container
                SucceededResult('new_container_id'),  # run new container
            ),
            expected_commands=[
                mock.call('docker inspect --type container name', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker inspect --type image registry/image:tag', abort_exception=docker.ImageNotFoundError),
                mock.call('docker inspect --type container name_backup', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker rm --volumes name_backup'),
                mock.call('docker rmi old_image_id', ignore_errors=True),
                mock.call('docker rename name name_backup'),
                mock.call('docker stop --time 10 name_backup'),
                mock.call(Command(docker_run_args_parser, {
                    'executable': ['docker'],
                    'run_or_create': ['run'],
                    'detach': True,
                    'name': 'name',
                    'image': 'registry/image:tag',
                    'command': [],
                }), quiet=True),
            ],
            update_kwargs=dict(registry='registry'),
            excpected_result=True,
        ),
        regular_complex=dict(  # TODO add more options
            side_effect=(
                SucceededResult('[{"Image": "image_id"}]'),  # current container info
                SucceededResult('[{"Id": "new_image_id"}]'),  # new image info
                SucceededResult('[{"Image": "old_image_id"}]'),  # obsolete container info
                SucceededResult(),  # delete obsolete container
                SucceededResult(),  # delete obsolete container image
                SucceededResult(),  # rename current container
                SucceededResult(),  # stop current container
                SucceededResult('new_container_id'),  # run new container
            ),
            expected_commands=[
                mock.call('docker inspect --type container name', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker inspect --type image registry/account/image:foo', abort_exception=docker.ImageNotFoundError),
                mock.call('docker inspect --type container name_backup', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker rm --volumes name_backup'),
                mock.call('docker rmi old_image_id', ignore_errors=True),
                mock.call('docker rename name name_backup'),
                mock.call('docker stop --time 10 name_backup'),
                mock.call(Command(docker_run_args_parser, {
                    'executable': ['docker'],
                    'run_or_create': ['run'],
                    'detach': True,
                    'name': 'name',
                    'image': 'registry/account/image:foo',
                    'command': [],
                }), quiet=True),
            ],
            update_kwargs=dict(tag='foo', registry='registry', account='account'),
            excpected_result=True,
        ),
        regular_without_backup_container=dict(
            side_effect=(
                SucceededResult('[{"Image": "image_id"}]'),  # current container info
                SucceededResult('[{"Id": "new_image_id"}]'),  # new image info
                docker.ContainerNotFoundError,  # obsolete container info
                SucceededResult(),  # rename current container
                SucceededResult(),  # stop current container
                SucceededResult('new_container_id'),  # run new container
            ),
            expected_commands=[
                mock.call('docker inspect --type container name', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker inspect --type image image:tag', abort_exception=docker.ImageNotFoundError),
                mock.call('docker inspect --type container name_backup', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker rename name name_backup'),
                mock.call('docker stop --time 10 name_backup'),
                mock.call(Command(docker_run_args_parser, {
                    'executable': ['docker'],
                    'run_or_create': ['run'],
                    'detach': True,
                    'name': 'name',
                    'image': 'image:tag',
                    'command': [],
                }), quiet=True),
            ],
            update_kwargs=dict(),
            excpected_result=True,
        ),
        forced_without_backup_container=dict(
            side_effect=(
                docker.ContainerNotFoundError,  # obsolete container info
                SucceededResult(),  # rename current container
                SucceededResult(),  # stop current container
                SucceededResult('new_container_id'),  # run new container
            ),
            expected_commands=[
                mock.call('docker inspect --type container name_backup', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker rename name name_backup'),
                mock.call('docker stop --time 10 name_backup'),
                mock.call(Command(docker_run_args_parser, {
                    'executable': ['docker'],
                    'run_or_create': ['run'],
                    'detach': True,
                    'name': 'name',
                    'image': 'image:tag',
                    'command': [],
                }), quiet=True),
            ],
            update_kwargs=dict(force=True),
            excpected_result=True,
        ),
        from_scratch=dict(
            # neither current nor backup container exists; the failed
            # rename is tolerated and a brand-new container is started
            side_effect=(
                docker.ContainerNotFoundError,  # current container info
                docker.ContainerNotFoundError,  # obsolete container info
                fabricio.Error,  # rename current container
                SucceededResult('new_container_id'),  # run new container
            ),
            expected_commands=[
                mock.call('docker inspect --type container name', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker inspect --type container name_backup', abort_exception=docker.ContainerNotFoundError),
                mock.call('docker rename name name_backup'),
                mock.call(Command(docker_run_args_parser, {
                    'executable': ['docker'],
                    'run_or_create': ['run'],
                    'detach': True,
                    'name': 'name',
                    'image': 'image:tag',
                    'command': [],
                }), quiet=True),
            ],
            update_kwargs=dict(),
            excpected_result=True,
        ),
    )
    for case, params in cases.items():
        with self.subTest(case=case):
            container = TestContainer(name='name')
            side_effect = params['side_effect']
            expected_commands = params['expected_commands']
            update_kwargs = params['update_kwargs']
            excpected_result = params['excpected_result']
            with mock.patch.object(
                fabricio,
                'run',
                side_effect=side_effect,
            ) as run:
                result = container.update(**update_kwargs)
                # the container's own name must survive the rename dance
                self.assertEqual('name', container.name)
                self.assertListEqual(run.mock_calls, expected_commands)
                self.assertEqual(excpected_result, result)
def test_revert(self):
    """revert() restarts the backup container and discards the failed one."""
    run_results = (
        SucceededResult('[{"Image": "backup_image_id"}]'),  # backup container info
        SucceededResult(),  # stop current container
        SucceededResult(),  # start backup container
        SucceededResult('[{"Image": "failed_image_id"}]'),  # current container info
        SucceededResult(),  # delete current container
        SucceededResult(),  # delete current container image
        SucceededResult(),  # rename backup container
    )
    expected_commands = [
        mock.call('docker inspect --type container name_backup', abort_exception=docker.ContainerNotFoundError),
        mock.call('docker stop --time 10 name'),
        mock.call('docker start name_backup'),
        mock.call('docker inspect --type container name', abort_exception=docker.ContainerNotFoundError),
        mock.call('docker rm --volumes name'),
        mock.call('docker rmi failed_image_id', ignore_errors=True),
        mock.call('docker rename name_backup name'),
    ]
    container = TestContainer(name='name')
    with mock.patch.object(fabricio, 'run', side_effect=run_results) as run:
        container.revert()
    self.assertListEqual(run.mock_calls, expected_commands)
@mock.patch.object(
    docker.Container,
    'info',
    new_callable=mock.PropertyMock,
    side_effect=docker.ContainerNotFoundError,
)
@mock.patch.object(fabricio, 'run')
def test_revert_raises_error_if_backup_container_not_found(self, run, *args):
    """revert() must fail fast — and run nothing — without a backup container."""
    with self.assertRaises(docker.ContainerError):
        docker.Container(name='name').revert()
    run.assert_not_called()
class ImageTestCase(unittest.TestCase):
maxDiff = None
def test___init___can_take_another_image_as_argument(self):
    """docker.Image() accepts an existing Image (or a Container's image)
    and copies its name, tag, registry, and digest representation."""
    cases = dict(
        default_image=dict(
            source_image=docker.Image(),
            name=None,
            tag=None,
            registry=None,
        ),
        filled_image_1=dict(
            source_image=docker.Image(name='name', tag='tag', registry='registry:5000'),
            name='name',
            tag='tag',
            registry='registry:5000',
            repr='registry:5000/name:tag',
        ),
        filled_image_2=dict(
            # same result when everything comes from a single string
            source_image=docker.Image('registry:5000/name:tag'),
            name='name',
            tag='tag',
            registry='registry:5000',
            repr='registry:5000/name:tag',
        ),
        digest=dict(
            # digest references keep the '@' form in repr/digest
            source_image=docker.Image('registry:5000/name@digest'),
            name='name',
            tag='digest',
            registry='registry:5000',
            repr='registry:5000/name@digest',
            digest='registry:5000/name@digest',
        ),
        from_container=dict(
            source_image=docker.Container(image='registry:5000/name:tag').image,
            name='name',
            tag='tag',
            registry='registry:5000',
            repr='registry:5000/name:tag',
        ),
        digest_from_container=dict(
            source_image=docker.Container(image='registry:5000/name@digest').image,
            name='name',
            tag='digest',
            registry='registry:5000',
            repr='registry:5000/name@digest',
            digest='registry:5000/name@digest',
        ),
    )
    for case, data in cases.items():
        with self.subTest(case=case):
            image = docker.Image(data['source_image'])
            self.assertEqual(image.name, data['name'])
            self.assertEqual(image.tag, data['tag'])
            self.assertEqual(image.registry, data['registry'])
            # repr/digest are only asserted where the case defines them
            if 'repr' in data:
                self.assertEqual(repr(image), data['repr'])
            if 'digest' in data:
                self.assertEqual(image.digest, data['digest'])
def test_info(self):
    """Image.info inspects the image and returns the first record."""
    image = docker.Image(name='name')
    inspect_output = SucceededResult('[{"Id": "123", "Image": "abc"}]')
    with mock.patch.object(
        fabricio,
        'run',
        return_value=inspect_output,
    ) as run:
        self.assertEqual(dict(Id='123', Image='abc'), image.info)
    run.assert_called_once_with(
        'docker inspect --type image name:latest',
        abort_exception=docker.ImageNotFoundError,
    )
@mock.patch.object(fabricio, 'run', side_effect=fabricio.Error)
def test_info_raises_error_if_image_not_found(self, run):
    """Image.info propagates the error raised by the failed inspect."""
    image = docker.Image(name='name')
    with self.assertRaises(fabricio.Error):
        image.info
    run.assert_called_once_with(
        'docker inspect --type image name:latest',
        abort_exception=docker.ImageNotFoundError,
    )
def test_delete(self):
    """Check the `docker rmi` command emitted by Image.delete() variants.

    Fix: the case-data key was misspelled 'expeected_commands'; renamed
    to 'expected_commands' (purely internal — key and its single read).
    """
    cases = dict(
        default=dict(
            expected_commands=[
                mock.call('docker rmi image:latest', ignore_errors=True),
            ],
            kwargs=dict(),
        ),
        forced=dict(
            expected_commands=[
                mock.call('docker rmi --force image:latest', ignore_errors=True),
            ],
            kwargs=dict(force=True),
        ),
        do_not_ignore_errors=dict(
            expected_commands=[
                mock.call('docker rmi image:latest', ignore_errors=False),
            ],
            kwargs=dict(ignore_errors=False),
        ),
    )
    for case, data in cases.items():
        with self.subTest(case=case):
            with mock.patch.object(fabricio, 'run') as run:
                image = docker.Image('image')
                image.delete(**data['kwargs'])
                self.assertListEqual(
                    run.mock_calls,
                    data['expected_commands'],
                )
def test_name_tag_registry(self):
    """Image name/tag/registry parsing from init kwargs and combined strings.

    Covers: implicit ':latest', '@digest' treated as the tag, registry
    split off a 'host:port/...' name, 'user/image' account prefixes, and
    explicit tag/registry kwargs overriding whatever the name contains.
    """
    cases = dict(
        single=dict(
            init_kwargs=dict(
                name='image',
            ),
            expected_name='image',
            expected_tag='latest',
            expected_registry=None,
            expected_str='image:latest',
        ),
        with_tag=dict(
            init_kwargs=dict(
                name='image',
                tag='tag',
            ),
            expected_name='image',
            expected_tag='tag',
            expected_registry=None,
            expected_str='image:tag',
        ),
        with_registry=dict(
            init_kwargs=dict(
                name='image',
                registry='registry:5000',
            ),
            expected_name='image',
            expected_tag='latest',
            expected_registry='registry:5000',
            expected_str='registry:5000/image:latest',
        ),
        digest_with_registry=dict(
            init_kwargs=dict(
                name='image@digest',
                registry='registry:5000',
            ),
            expected_name='image',
            expected_tag='digest',
            expected_registry='registry:5000',
            expected_str='registry:5000/image@digest',
        ),
        with_tag_and_registry=dict(
            init_kwargs=dict(
                name='image',
                tag='tag',
                registry='127.0.0.1:5000',
            ),
            expected_name='image',
            expected_tag='tag',
            expected_registry='127.0.0.1:5000',
            expected_str='127.0.0.1:5000/image:tag',
        ),
        with_tag_and_registry_and_user=dict(
            init_kwargs=dict(
                name='user/image',
                tag='tag',
                registry='127.0.0.1:5000',
            ),
            expected_name='user/image',
            expected_tag='tag',
            expected_registry='127.0.0.1:5000',
            expected_str='127.0.0.1:5000/user/image:tag',
        ),
        single_arg_with_tag=dict(
            init_kwargs=dict(
                name='image:tag',
            ),
            expected_name='image',
            expected_tag='tag',
            expected_registry=None,
            expected_str='image:tag',
        ),
        single_arg_with_digest=dict(
            init_kwargs=dict(
                name='image@digest',
            ),
            expected_name='image',
            expected_tag='digest',
            expected_registry=None,
            expected_str='image@digest',
        ),
        single_arg_with_registry=dict(
            init_kwargs=dict(
                name='registry:123/image',
            ),
            expected_name='image',
            expected_tag='latest',
            expected_registry='registry:123',
            expected_str='registry:123/image:latest',
        ),
        single_arg_with_tag_and_registry=dict(
            init_kwargs=dict(
                name='registry:123/image:tag',
            ),
            expected_name='image',
            expected_tag='tag',
            expected_registry='registry:123',
            expected_str='registry:123/image:tag',
        ),
        single_arg_with_digest_and_registry=dict(
            init_kwargs=dict(
                name='registry:123/image@digest',
            ),
            expected_name='image',
            expected_tag='digest',
            expected_registry='registry:123',
            expected_str='registry:123/image@digest',
        ),
        forced_with_tag=dict(
            # explicit tag kwarg wins over the tag embedded in the name
            init_kwargs=dict(
                name='image:tag',
                tag='foo',
            ),
            expected_name='image',
            expected_tag='foo',
            expected_registry=None,
            expected_str='image:foo',
        ),
        digest_forced_with_tag=dict(
            # explicit tag kwarg also replaces a digest reference
            init_kwargs=dict(
                name='image@digest',
                tag='foo',
            ),
            expected_name='image',
            expected_tag='foo',
            expected_registry=None,
            expected_str='image:foo',
        ),
        forced_with_registry=dict(
            init_kwargs=dict(
                name='user/image',
                registry='foo',
            ),
            expected_name='user/image',
            expected_tag='latest',
            expected_registry='foo',
            expected_str='foo/user/image:latest',
        ),
        forced_with_tag_and_registry=dict(
            init_kwargs=dict(
                name='user/image:tag',
                tag='foo',
                registry='bar',
            ),
            expected_name='user/image',
            expected_tag='foo',
            expected_registry='bar',
            expected_str='bar/user/image:foo',
        ),
        digest_forced_with_tag_and_registry=dict(
            init_kwargs=dict(
                name='user/image@digest',
                tag='foo',
                registry='bar',
            ),
            expected_name='user/image',
            expected_tag='foo',
            expected_registry='bar',
            expected_str='bar/user/image:foo',
        ),
    )
    for case, data in cases.items():
        with self.subTest(case=case):
            image = docker.Image(**data['init_kwargs'])
            self.assertEqual(data['expected_name'], image.name)
            self.assertEqual(data['expected_tag'], image.tag)
            self.assertEqual(data['expected_registry'], image.registry)
            self.assertEqual(data['expected_str'], str(image))
def test_getitem(self):
    """image[item] derives a new Image, overriding parts of the reference.

    A plain tag overrides the tag (dropping any digest); an item that
    looks like 'name:tag', 'account/name:tag', or 'registry/...' can
    override name, account, and registry as well; None keeps everything.
    """
    cases = dict(
        none=dict(
            image_init_kwargs=dict(name='name', tag='tag', registry='registry'),
            item=None,
            expected_str='registry/name:tag',
        ),
        tag=dict(
            image_init_kwargs=dict(name='name', tag='tag', registry='registry'),
            item='custom_tag',
            expected_str='registry/name:custom_tag',
        ),
        digest_none=dict(
            image_init_kwargs=dict(name='name@digest'),
            item=None,
            expected_str='name@digest',
        ),
        digest_tag=dict(
            # a tag item converts a digest reference into a tag reference
            image_init_kwargs=dict(name='name@digest'),
            item='custom_tag',
            expected_str='name:custom_tag',
        ),
        override_name=dict(
            image_init_kwargs=dict(name='default', tag='default', registry='default'),
            item='name:tag',
            expected_str='default/name:tag',
        ),
        override_name_from_empty=dict(
            image_init_kwargs=dict(),
            item='name:tag',
            expected_str='name:tag',
        ),
        override_name_and_digest=dict(
            image_init_kwargs=dict(name='default', tag='default', registry='default'),
            item='name@digest',
            expected_str='default/name@digest',
        ),
        override_name_and_digest_from_empty=dict(
            image_init_kwargs=dict(),
            item='name@digest',
            expected_str='name@digest',
        ),
        override_name_and_account=dict(
            image_init_kwargs=dict(name='default', tag='default', registry='default'),
            item='account/name:tag',
            expected_str='default/account/name:tag',
        ),
        override_name_and_account_from_empty=dict(
            image_init_kwargs=dict(),
            item='account/name:tag',
            expected_str='account/name:tag',
        ),
        override_name_and_registry=dict(
            # a registry in the item replaces the image's own registry
            image_init_kwargs=dict(name='default', tag='default', registry='default'),
            item='localhost:5000/name:tag',
            expected_str='localhost:5000/name:tag',
        ),
        override_name_and_registry_from_empty=dict(
            image_init_kwargs=dict(),
            item='localhost:5000/name:tag',
            expected_str='localhost:5000/name:tag',
        ),
        override_digest_and_registry=dict(
            image_init_kwargs=dict(name='default', tag='default', registry='default'),
            item='localhost:5000/name@digest',
            expected_str='localhost:5000/name@digest',
        ),
        override_digest_and_registry_from_empty=dict(
            image_init_kwargs=dict(),
            item='localhost:5000/name@digest',
            expected_str='localhost:5000/name@digest',
        ),
        override_name_and_registry_and_account=dict(
            image_init_kwargs=dict(name='default', tag='default', registry='default'),
            item='localhost:5000/account/name:tag',
            expected_str='localhost:5000/account/name:tag',
        ),
        override_name_and_registry_and_account_from_empty=dict(
            image_init_kwargs=dict(),
            item='localhost:5000/account/name:tag',
            expected_str='localhost:5000/account/name:tag',
        ),
        override_digest_and_registry_and_account=dict(
            image_init_kwargs=dict(name='default', tag='default', registry='default'),
            item='localhost:5000/account/name@digest',
            expected_str='localhost:5000/account/name@digest',
        ),
        override_digest_and_registry_and_account_from_empty=dict(
            image_init_kwargs=dict(),
            item='localhost:5000/account/name@digest',
            expected_str='localhost:5000/account/name@digest',
        ),
        override_name_and_registry_skip_tag=dict(
            # tag omitted in the item falls back to ':latest'
            image_init_kwargs=dict(name='default', tag='default', registry='default'),
            item='localhost:5000/name',
            expected_str='localhost:5000/name:latest',
        ),
        override_name_and_registry_skip_tag_from_empty=dict(
            image_init_kwargs=dict(),
            item='localhost:5000/name',
            expected_str='localhost:5000/name:latest',
        ),
        override_name_and_registry_skip_tag_from_digest=dict(
            image_init_kwargs=dict(name='name@digest'),
            item='localhost:5000/name',
            expected_str='localhost:5000/name:latest',
        ),
    )
    for case, data in cases.items():
        with self.subTest(case=case):
            image = docker.Image(**data['image_init_kwargs'])
            new_image = image[data['item']]
            self.assertEqual(data['expected_str'], str(new_image))
def test_getitem_slice(self):
    """Image slicing — ``image[registry:tag:account]`` — must build a new Image.

    The slice components map to: start -> registry override, stop -> tag
    (or a full "name", "name:tag" or "registry/name:tag" override),
    step -> account prefix. Each case checks str() of the resulting Image.
    """
    cases = dict(
        none=dict(
            image_init_kwargs=dict(name='name', tag='tag', registry='registry'),
            start=None,
            stop=None,
            step=None,
            expected_str='registry/name:tag',
        ),
        tag=dict(
            image_init_kwargs=dict(name='name', tag='tag', registry='registry'),
            start=None,
            stop='custom_tag',
            step=None,
            expected_str='registry/name:custom_tag',
        ),
        registry=dict(
            image_init_kwargs=dict(name='name', tag='tag', registry='registry'),
            start='registry:5000',
            stop=None,
            step=None,
            expected_str='registry:5000/name:tag',
        ),
        account=dict(
            image_init_kwargs=dict(name='name', tag='tag', registry='registry'),
            start=None,
            stop=None,
            step='account',
            expected_str='registry/account/name:tag',
        ),
        account_replace=dict(
            # an existing account prefix in the name is replaced, not stacked
            image_init_kwargs=dict(name='original/name', tag='tag', registry='registry'),
            start=None,
            stop=None,
            step='account',
            expected_str='registry/account/name:tag',
        ),
        complex=dict(
            image_init_kwargs=dict(name='name', tag='tag', registry='registry'),
            start='127.0.0.1:5000',
            stop='custom_tag',
            step='account',
            expected_str='127.0.0.1:5000/account/name:custom_tag',
        ),
        override_name=dict(
            # a "name:tag" stop value overrides both name and tag
            image_init_kwargs=dict(name='default', tag='default', registry='default'),
            start='127.0.0.1:5000',
            stop='name:tag',
            step='account',
            expected_str='127.0.0.1:5000/name:tag',
        ),
        override_name_and_account=dict(
            image_init_kwargs=dict(name='default', tag='default', registry='default'),
            start='127.0.0.1:5000',
            stop='acc/name:tag',
            step='account',
            expected_str='127.0.0.1:5000/acc/name:tag',
        ),
        override_name_and_registry=dict(
            # a fully-qualified stop value wins over the start registry too
            image_init_kwargs=dict(name='default', tag='default', registry='default'),
            start='127.0.0.1:5000',
            stop='localhost:5000/name:tag',
            step='account',
            expected_str='localhost:5000/name:tag',
        ),
        override_name_and_registry_skip_tag=dict(
            # missing tag in the override defaults to ':latest'
            image_init_kwargs=dict(name='default', tag='default', registry='default'),
            start='127.0.0.1:5000',
            stop='localhost:5000/name',
            step='account',
            expected_str='localhost:5000/name:latest',
        ),
        override_name_and_registry_skip_tag_digest=dict(
            image_init_kwargs=dict(name='name@digest'),
            start='127.0.0.1:5000',
            stop='localhost:5000/name',
            step='account',
            expected_str='localhost:5000/name:latest',
        ),
        digest_none=dict(
            image_init_kwargs=dict(name='name@digest'),
            start=None,
            stop=None,
            step=None,
            expected_str='name@digest',
        ),
        digest_tag=dict(
            # supplying a tag converts a digest reference into a tag reference
            image_init_kwargs=dict(name='name@digest'),
            start=None,
            stop='custom_tag',
            step=None,
            expected_str='name:custom_tag',
        ),
        digest_registry=dict(
            image_init_kwargs=dict(name='name@digest'),
            start='registry:5000',
            stop=None,
            step=None,
            expected_str='registry:5000/name@digest',
        ),
        digest_complex=dict(
            image_init_kwargs=dict(name='name@digest'),
            start='127.0.0.1:5000',
            stop='custom_tag',
            step='account',
            expected_str='127.0.0.1:5000/account/name:custom_tag',
        ),
    )
    for case, data in cases.items():
        with self.subTest(case=case):
            image = docker.Image(**data['image_init_kwargs'])
            # slice syntax delegates to Image.__getitem__ with a slice object
            new_image = image[data['start']:data['stop']:data['step']]
            self.assertEqual(data['expected_str'], str(new_image))
def test_run(self):
    """Image.run() must produce a correct ``docker run`` command line.

    fabricio.run is mocked; the generated command string is parsed with
    docker_run_args_parser and the parsed options are compared against
    each case's ``expected_args``.
    """
    image = docker.Image('image')
    cases = dict(
        default=dict(
            kwargs=dict(),
            expected_args={
                'executable': ['docker'],
                'run_or_create': ['run'],
                'rm': True,
                'tty': True,
                'interactive': True,
                'image': 'image:latest',
                'command': [],
            },
        ),
        with_main_option=dict(
            kwargs=dict(options={'user': 'user'}),
            expected_args={
                'executable': ['docker'],
                'run_or_create': ['run'],
                'rm': True,
                'tty': True,
                'interactive': True,
                'user': 'user',
                'image': 'image:latest',
                'command': [],
            },
        ),
        with_additional_option=dict(
            # options unknown to Image are passed through verbatim
            kwargs=dict(options={'custom-option': 'bar'}),
            expected_args={
                'executable': ['docker'],
                'run_or_create': ['run'],
                'rm': True,
                'tty': True,
                'interactive': True,
                'custom-option': 'bar',
                'image': 'image:latest',
                'command': [],
            },
        ),
        with_command=dict(
            kwargs=dict(command='command'),
            expected_args={
                'executable': ['docker'],
                'run_or_create': ['run'],
                'rm': True,
                'tty': True,
                'interactive': True,
                'image': 'image:latest',
                'command': ['command'],
            },
        ),
        detached=dict(
            # temporary=False switches --rm/--tty/--interactive to --detach
            kwargs=dict(temporary=False, name='name'),
            # NOTE(review): 'expected_command' is not read by any assertion
            # in this test — only 'expected_args' is checked; confirm intent.
            expected_command='docker run --detach image:latest ',
            expected_args={
                'executable': ['docker'],
                'run_or_create': ['run'],
                'name': 'name',
                'detach': True,
                'image': 'image:latest',
                'command': [],
            },
        ),
        with_name=dict(
            kwargs=dict(name='name'),
            # NOTE(review): 'expected_command' unused here as well.
            expected_command='docker run --name name --rm --tty --interactive image:latest ',
            expected_args={
                'executable': ['docker'],
                'run_or_create': ['run'],
                'rm': True,
                'tty': True,
                'interactive': True,
                'image': 'image:latest',
                'name': 'name',
                'command': [],
            },
        ),
    )
    def test_command(command, *args, **kwargs):
        # 'data' is resolved late (closure), so it always refers to the
        # current case being executed by the loop below
        options = docker_run_args_parser.parse_args(shlex.split(command))
        self.assertDictEqual(vars(options), data['expected_args'])
    for case, data in cases.items():
        with self.subTest(case=case):
            with mock.patch.object(fabricio, 'run', side_effect=test_command):
                image.run(**data['kwargs'])
def test_image_as_descriptor(self):
    """Container.image must behave as a caching descriptor.

    Three passes over the same cases verify: (1) image given to the
    constructor, (2) image assigned after construction, (3) image
    predefined on the class. In every pass, repeated attribute access
    returns the same object, reassignment yields a new Image instance,
    and str() resolves to the container's reported image id.
    """
    class Container(docker.Container):
        # fake 'docker inspect' payload: the container reports this image id
        info = dict(Image='image_id')
    cases = dict(
        none=dict(
            image=None,
            expected_name=None,
            expected_registry=None,
            expected_tag=None,
        ),
        name=dict(
            image='image',
            expected_name='image',
            expected_registry=None,
            expected_tag='latest',
        ),
        name_and_tag=dict(
            image='image:tag',
            expected_name='image',
            expected_registry=None,
            expected_tag='tag',
        ),
        name_and_registry=dict(
            image='host:5000/image',
            expected_name='image',
            expected_registry='host:5000',
            expected_tag='latest',
        ),
        complex=dict(
            image='host:5000/user/image:tag',
            expected_name='user/image',
            expected_registry='host:5000',
            expected_tag='tag',
        ),
    )
    # class-level access returns the bare descriptor itself, every time
    image = Container.image
    self.assertIsInstance(image, docker.Image)
    self.assertIsNone(image.name)
    self.assertIsNone(image.registry)
    self.assertIsNone(image.tag)
    self.assertIs(Container.image, image)
    for case, data in cases.items():
        with self.subTest(case=case):
            container = Container(image=data['image'])
            # repeated access must return the identical (cached) object
            self.assertIs(container.image, container.image)
            self.assertIsInstance(container.image, docker.Image)
            self.assertEqual(container.image.name, data['expected_name'])
            self.assertEqual(container.image.registry, data['expected_registry'])
            self.assertEqual(container.image.tag, data['expected_tag'])
            self.assertEqual(str(container.image), 'image_id')
            # reassigning (even with the same Image) must create a fresh copy
            container.image = old_image = container.image
            self.assertIsNot(container.image, old_image)
            self.assertIsInstance(container.image, docker.Image)
            self.assertEqual(container.image.name, data['expected_name'])
            self.assertEqual(container.image.registry, data['expected_registry'])
            self.assertEqual(container.image.tag, data['expected_tag'])
            self.assertEqual(str(container.image), 'image_id')
    for case, data in cases.items():
        with self.subTest(case='redefine_' + case):
            container = Container()
            container.image = data['image']
            self.assertIs(container.image, container.image)
            self.assertIsInstance(container.image, docker.Image)
            self.assertEqual(container.image.name, data['expected_name'])
            self.assertEqual(container.image.registry, data['expected_registry'])
            self.assertEqual(container.image.tag, data['expected_tag'])
            self.assertEqual(str(container.image), 'image_id')
            container.image = old_image = container.image
            self.assertIsNot(container.image, old_image)
            self.assertIsInstance(container.image, docker.Image)
            self.assertEqual(container.image.name, data['expected_name'])
            self.assertEqual(container.image.registry, data['expected_registry'])
            self.assertEqual(container.image.tag, data['expected_tag'])
            self.assertEqual(str(container.image), 'image_id')
    for case, data in cases.items():
        with self.subTest(case='predefined_' + case):
            # mutates the class attribute; each iteration overwrites it
            Container.image = docker.Image(data['image'])
            container = Container()
            self.assertIs(container.image, container.image)
            self.assertIsInstance(container.image, docker.Image)
            self.assertEqual(container.image.name, data['expected_name'])
            self.assertEqual(container.image.registry, data['expected_registry'])
            self.assertEqual(container.image.tag, data['expected_tag'])
            self.assertEqual(str(container.image), 'image_id')
            container.image = old_image = container.image
            self.assertIsNot(container.image, old_image)
            self.assertIsInstance(container.image, docker.Image)
            self.assertEqual(container.image.name, data['expected_name'])
            self.assertEqual(container.image.registry, data['expected_registry'])
            self.assertEqual(container.image.tag, data['expected_tag'])
            self.assertEqual(str(container.image), 'image_id')
def test_get_field_name_raises_error_on_collision(self):
    """Binding the same image descriptor under two attribute names is rejected.

    The descriptor cannot decide which field name it serves, so accessing
    it must raise ValueError.
    """
    class Container(docker.Container):
        # second attribute pointing at the very same Image descriptor
        image2 = docker.Container.image

    instance = Container(name='name')
    with self.assertRaises(ValueError):
        instance.image
@mock.patch('fabricio.run')
@mock.patch('fabricio.local')
def test_build(self, local, run):
    # decorators apply bottom-up, hence the (local, run) parameter order
    """Image.build() must issue inspect/tag/build/cleanup commands in order.

    Both remote (fabricio.run) and local (fabricio.local) execution paths
    are checked, including graceful handling of inspect/cleanup failures.
    """
    cases = dict(
        default=dict(
            build_kwargs=dict(),
            expected_calls=[
                mock.call.run('docker inspect --type image image:latest', abort_exception=docker.ImageNotFoundError, use_cache=False),
                mock.call.run('docker tag image:latest fabricio-temp-image:image && docker rmi image:latest', use_cache=False, ignore_errors=True),
                mock.call.run('docker build --tag=image:latest --pull=1 --force-rm=1 .', use_cache=False, quiet=False),
                mock.call.run('docker rmi fabricio-temp-image:image old_parent_id', use_cache=False, ignore_errors=True),
            ],
            side_effect=[
                SucceededResult('[{"Parent": "old_parent_id"}]'),
                SucceededResult(),
                SucceededResult(),
                SucceededResult(),
            ],
        ),
        local=dict(
            # local=True routes every command through fabricio.local instead
            build_kwargs=dict(local=True),
            expected_calls=[
                mock.call.local('docker inspect --type image image:latest', abort_exception=docker.ImageNotFoundError, use_cache=False, capture=True),
                mock.call.local('docker tag image:latest fabricio-temp-image:image && docker rmi image:latest', use_cache=False, ignore_errors=True),
                mock.call.local('docker build --tag=image:latest --pull=1 --force-rm=1 .', use_cache=False, quiet=False),
                mock.call.local('docker rmi fabricio-temp-image:image old_parent_id', use_cache=False, ignore_errors=True),
            ],
            side_effect=[
                SucceededResult('[{"Parent": "old_parent_id"}]'),
                SucceededResult(),
                SucceededResult(),
                SucceededResult(),
            ],
        ),
        errors=dict(
            # missing image and failed tag/cleanup must not abort the build;
            # with no old parent id the final rmi lists only the temp image
            build_kwargs=dict(),
            expected_calls=[
                mock.call.run('docker inspect --type image image:latest', abort_exception=docker.ImageNotFoundError, use_cache=False),
                mock.call.run('docker tag image:latest fabricio-temp-image:image && docker rmi image:latest', use_cache=False, ignore_errors=True),
                mock.call.run('docker build --tag=image:latest --pull=1 --force-rm=1 .', use_cache=False, quiet=False),
                mock.call.run('docker rmi fabricio-temp-image:image ', use_cache=False, ignore_errors=True),
            ],
            side_effect=[
                docker.ImageNotFoundError(),
                FailedResult(),
                SucceededResult(),
                FailedResult(),
            ],
        ),
    )
    # parent mock records run/local invocations in a single ordered list
    build = mock.Mock()
    build.attach_mock(run, 'run')
    build.attach_mock(local, 'local')
    for case, data in cases.items():
        with self.subTest(case=case):
            build.reset_mock()
            run.side_effect = local.side_effect = data['side_effect']
            image = docker.Image('image')
            image.build(**data['build_kwargs'])
            self.assertListEqual(data['expected_calls'], build.mock_calls)
class ServiceTestCase(unittest.TestCase):
maxDiff = None
def setUp(self):
    """Enter a fabric settings context that hides all command output."""
    hidden_output = fab.settings(fab.hide('everything'))
    hidden_output.__enter__()
    # kept on the instance so tearDown can leave the same context
    self.fab_settings = hidden_output
def tearDown(self):
    """Drop cached fabricio.run results and restore fabric output settings."""
    fabricio.run.cache.clear()
    no_exception = (None, None, None)
    self.fab_settings.__exit__(*no_exception)
def test_update(self):
    """Service.update() end-to-end behaviour over mocked fab.run calls.

    Each case supplies, in lockstep order: the parser used to decode every
    shell command Service.update() issues (``args_parsers``), the parsed
    options expected for it (``expected_args``), and the fake result or
    exception the command returns (``side_effect``). ``expected_result``
    is either the return value of update() or an exception class.
    """
    cases = dict(
        worker=dict(
            # non-manager node among several hosts: update() is a no-op
            init_kwargs=dict(
                name='service',
                image='image:tag',
            ),
            update_kwargs=dict(),
            side_effect=(
                SucceededResult(' Is Manager: false'),  # manager status
            ),
            args_parsers=[
                args_parser,
            ],
            expected_args=[
                {
                    'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                },
            ],
            expected_result=False,
            all_hosts=['host1', 'host2'],
        ),
        worker_without_manager=dict(
            # the only host is not a manager: no one can update the service
            init_kwargs=dict(
                name='service',
                image='image:tag',
            ),
            update_kwargs=dict(),
            side_effect=(
                SucceededResult(' Is Manager: false'),  # manager status
            ),
            args_parsers=[
                args_parser,
            ],
            expected_args=[
                {
                    'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                },
            ],
            expected_result=docker.ServiceError,
        ),
        is_manager_fails=dict(
            init_kwargs=dict(
                name='service',
                image='image:tag',
            ),
            update_kwargs=dict(),
            side_effect=(
                fabricio.Error(),  # manager status
            ),
            args_parsers=[
                args_parser,
            ],
            expected_args=[
                {
                    'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                },
            ],
            expected_result=docker.ServiceError,
        ),
        is_manager_fails_multiple_hosts=dict(
            # with other hosts remaining, a failed check degrades to False
            init_kwargs=dict(
                name='service',
                image='image:tag',
            ),
            update_kwargs=dict(),
            side_effect=(
                fabricio.Error(),  # manager status
            ),
            args_parsers=[
                args_parser,
            ],
            expected_args=[
                {
                    'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                },
            ],
            expected_result=False,
            all_hosts=['host1', 'host2'],
        ),
        no_changes=dict(
            # current service labels match the options hash: nothing to do
            init_kwargs=dict(
                name='service',
                image='image:tag',
                options=dict(
                    secret='secret',
                ),
            ),
            update_kwargs=dict(),
            side_effect=(
                SucceededResult(' Is Manager: true'),  # manager status
                SucceededResult('[{"RepoDigests": ["digest"]}]'),  # image info
                SucceededResult(json.dumps([{"Spec": {
                    "Labels": {
                        "fabricio.service.options": "b1a9a7833e4ca8b5122b9db71844ed33",
                    },
                    "TaskTemplate": {
                        "ContainerSpec": {
                            "Secrets": [
                                {
                                    "File": {
                                        "Name": "secret",
                                    },
                                    "SecretID": "secret",
                                    "SecretName": "secret",
                                },
                            ],
                        },
                    },
                }}])),  # service info
            ),
            args_parsers=[
                args_parser,
                docker_inspect_args_parser,
                docker_entity_inspect_args_parser,
            ],
            expected_args=[
                {
                    'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                },
                {
                    'executable': ['docker', 'inspect'],
                    'type': 'image',
                    'image_or_container': 'image:tag',
                },
                {
                    'executable': ['docker', 'service', 'inspect'],
                    'service': 'service',
                },
            ],
            expected_result=False,
        ),
        forced=dict(
            # force=True updates even when the options hash is unchanged
            init_kwargs=dict(
                name='service',
                image='image:tag',
                options=dict(
                    secret='secret',
                ),
            ),
            update_kwargs=dict(force=True),
            side_effect=(
                SucceededResult(' Is Manager: true'),  # manager status
                SucceededResult('[{"RepoDigests": ["digest"]}]'),  # image info
                SucceededResult(json.dumps([{"Spec": {
                    "Labels": {
                        "fabricio.service.options": "b1a9a7833e4ca8b5122b9db71844ed33",
                    },
                    "TaskTemplate": {
                        "ContainerSpec": {
                            "Secrets": [
                                {
                                    "File": {
                                        "Name": "secret",
                                    },
                                    "SecretID": "secret",
                                    "SecretName": "secret",
                                },
                            ],
                        },
                    },
                }}])),  # service info
                SucceededResult(),  # service update
            ),
            args_parsers=[
                args_parser,
                docker_inspect_args_parser,
                docker_entity_inspect_args_parser,
                docker_service_update_args_parser,
            ],
            expected_args=[
                {
                    'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                },
                {
                    'executable': ['docker', 'inspect'],
                    'type': 'image',
                    'image_or_container': 'image:tag',
                },
                {
                    'executable': ['docker', 'service', 'inspect'],
                    'service': 'service',
                },
                {
                    'executable': ['docker', 'service', 'update'],
                    'image': 'digest',
                    'service': 'service',
                    'args': '',
                    'label-add': [
                        'fabricio.service.options=b1a9a7833e4ca8b5122b9db71844ed33',
                    ],
                    'secret-add': ['secret'],
                    'secret-rm': ['secret'],
                },
            ],
            expected_result=True,
        ),
        updated=dict(
            init_kwargs=dict(
                name='service',
                image='image:tag',
            ),
            update_kwargs=dict(),
            side_effect=(
                SucceededResult(' Is Manager: true'),  # manager status
                SucceededResult('[{"RepoDigests": ["digest"]}]'),  # image info
                SucceededResult('[{"Spec": {}}]'),  # service info
                SucceededResult(),  # service update
            ),
            args_parsers=[
                args_parser,
                docker_inspect_args_parser,
                docker_entity_inspect_args_parser,
                docker_service_update_args_parser,
            ],
            expected_args=[
                {
                    'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                },
                {
                    'executable': ['docker', 'inspect'],
                    'type': 'image',
                    'image_or_container': 'image:tag',
                },
                {
                    'executable': ['docker', 'service', 'inspect'],
                    'service': 'service',
                },
                {
                    'executable': ['docker', 'service', 'update'],
                    'image': 'digest',
                    'service': 'service',
                    'args': '',
                    'label-add': [
                        'fabricio.service.options=5ed89ef87bc69f63506f92169933231d',
                    ],
                },
            ],
            expected_result=True,
        ),
        updated_with_custom_labels_and_args=dict(
            init_kwargs=dict(
                name='service',
                image='image:tag',
                options=dict(label=['label1=label1', 'label2=label2']),
                args='foo bar',
            ),
            update_kwargs=dict(),
            side_effect=(
                SucceededResult(' Is Manager: true'),  # manager status
                SucceededResult('[{"RepoDigests": ["digest"]}]'),  # image info
                SucceededResult('[{"Spec": {}}]'),  # service info
                SucceededResult(),  # service update
            ),
            args_parsers=[
                args_parser,
                docker_inspect_args_parser,
                docker_entity_inspect_args_parser,
                docker_service_update_args_parser,
            ],
            expected_args=[
                {
                    'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                },
                {
                    'executable': ['docker', 'inspect'],
                    'type': 'image',
                    'image_or_container': 'image:tag',
                },
                {
                    'executable': ['docker', 'service', 'inspect'],
                    'service': 'service',
                },
                {
                    'executable': ['docker', 'service', 'update'],
                    'image': 'digest',
                    'service': 'service',
                    'label-add': [
                        'label1=label1',
                        'label2=label2',
                        'fabricio.service.options=0a4991404e926ea32115d3ad6debf1c7',
                    ],
                    'args': 'foo bar',
                },
            ],
            expected_result=True,
        ),
        updated_with_custom_tag_and_registry_and_account=dict(
            init_kwargs=dict(
                name='service',
                image='image:tag',
            ),
            update_kwargs=dict(tag='custom_tag', registry='registry', account='account'),
            side_effect=(
                SucceededResult(' Is Manager: true'),  # manager status
                SucceededResult('[{"RepoDigests": ["digest"]}]'),  # image info
                SucceededResult('[{"Spec": {}}]'),  # service info
                SucceededResult(),  # service update
            ),
            args_parsers=[
                args_parser,
                docker_inspect_args_parser,
                docker_entity_inspect_args_parser,
                docker_service_update_args_parser,
            ],
            expected_args=[
                {
                    'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                },
                {
                    'executable': ['docker', 'inspect'],
                    'type': 'image',
                    'image_or_container': 'registry/account/image:custom_tag',
                },
                {
                    'executable': ['docker', 'service', 'inspect'],
                    'service': 'service',
                },
                {
                    'executable': ['docker', 'service', 'update'],
                    'image': 'digest',
                    'service': 'service',
                    'args': '',
                    'label-add': [
                        'fabricio.service.options=5ed89ef87bc69f63506f92169933231d',
                    ],
                },
            ],
            expected_result=True,
        ),
        created=dict(
            # missing service falls through to `docker service create`
            init_kwargs=dict(
                name='service',
                image='image:tag',
            ),
            update_kwargs=dict(),
            side_effect=(
                SucceededResult(' Is Manager: true'),  # manager status
                SucceededResult('[{"RepoDigests": ["digest"]}]'),  # image info
                docker.ServiceNotFoundError(),  # service info
                SucceededResult(),  # service create
            ),
            args_parsers=[
                args_parser,
                docker_inspect_args_parser,
                docker_entity_inspect_args_parser,
                docker_service_create_args_parser,
            ],
            expected_args=[
                {
                    'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                },
                {
                    'executable': ['docker', 'inspect'],
                    'type': 'image',
                    'image_or_container': 'image:tag',
                },
                {
                    'executable': ['docker', 'service', 'inspect'],
                    'service': 'service',
                },
                {
                    'executable': ['docker', 'service', 'create'],
                    'image': ['digest'],
                    'name': 'service',
                    'args': [],
                    'label': [
                        'fabricio.service.options=5ed89ef87bc69f63506f92169933231d',
                    ],
                },
            ],
            expected_result=True,
        ),
        created_with_custom_tag_and_registry=dict(
            init_kwargs=dict(
                name='service',
                image='image:tag',
            ),
            update_kwargs=dict(tag='custom_tag', registry='registry'),
            side_effect=(
                SucceededResult(' Is Manager: true'),  # manager status
                SucceededResult('[{"RepoDigests": ["digest"]}]'),  # image info
                docker.ServiceNotFoundError(),  # service info
                SucceededResult(),  # service create
            ),
            args_parsers=[
                args_parser,
                docker_inspect_args_parser,
                docker_entity_inspect_args_parser,
                docker_service_create_args_parser,
            ],
            expected_args=[
                {
                    'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'],
                },
                {
                    'executable': ['docker', 'inspect'],
                    'type': 'image',
                    'image_or_container': 'registry/image:custom_tag',
                },
                {
                    'executable': ['docker', 'service', 'inspect'],
                    'service': 'service',
                },
                {
                    'executable': ['docker', 'service', 'create'],
                    'image': ['digest'],
                    'name': 'service',
                    'args': [],
                    'label': [
                        'fabricio.service.options=5ed89ef87bc69f63506f92169933231d',
                    ],
                },
            ],
            expected_result=True,
        ),
    )
    def test_command(command, **kwargs):
        # consumes the per-case iterators set up by the loop below (late
        # binding); verifies each issued command and replays its result
        args = shlex.split(command)
        parser = next(args_parsers)
        options = vars(parser.parse_args(args))
        self.assertDictEqual(options, next(expected_args))
        result = next(side_effect)
        if isinstance(result, Exception):
            raise result
        return result
    for case, data in cases.items():
        expected_args = iter(data['expected_args'])
        args_parsers = iter(data['args_parsers'])
        side_effect = iter(data['side_effect'])
        with self.subTest(case=case):
            # unique fab command name so cached results don't leak between cases
            fab.env.command = '{0}__{1}'.format(self, case)
            fabricio.run.cache.clear()  # reset Service.is_manager()
            with mock.patch.dict(fab.env, dict(all_hosts=data.get('all_hosts', ['host']))):
                with mock.patch.object(fab, 'run', side_effect=test_command) as run:
                    run.__name__ = 'mocked_run'
                    service = docker.Service(**data['init_kwargs'])
                    expected_result = data['expected_result']
                    try:
                        result = service.update(**data['update_kwargs'])
                        self.assertEqual(result, expected_result)
                    except AssertionError:
                        raise
                    except Exception as exception:
                        # expected_result may be an exception class instead
                        # of a value; anything else must propagate
                        try:
                            is_exception_expected = issubclass(expected_result, Exception)
                        except TypeError:
                            is_exception_expected = False
                        if not is_exception_expected:
                            raise
                        self.assertIsInstance(exception, expected_result)
                    # every expected command must have actually been issued
                    self.assertEqual(run.call_count, len(data['expected_args']))
@mock.patch.dict(fab.env, dict(all_hosts=['host1', 'host2']))
def test_is_manager_returns_false_if_pull_error(self):
    """With other hosts remaining, a failed pull degrades is_manager() to False.

    The host's manager status is pre-cached as False, so is_manager() must
    return False without shelling out — fabricio.run stays uncalled.

    Fix: dropped the dead ``*args`` parameter — mock.patch.dict does not
    pass anything to the decorated function, and unittest invokes test
    methods with no extra arguments, so it was never populated.
    """
    with mock.patch.object(fabricio, 'run') as run:
        service = docker.Service(name='service')
        service.managers[fab.env.host] = False
        self.assertFalse(service.is_manager())
        run.assert_not_called()
@mock.patch.dict(fab.env, dict(all_hosts=['host']))
def test_is_manager_raises_error_if_all_pulls_failed(self):
    """When the only host's pull failed, is_manager() must raise ServiceError."""
    with mock.patch.object(fabricio, 'run') as mocked_run:
        service = docker.Service(name='service')
        # mark the single host as failed / non-manager up front
        service.managers[fab.env.host] = False
        with self.assertRaises(docker.ServiceError):
            service.is_manager()
        # the cached status must short-circuit any remote command
        mocked_run.assert_not_called()
def test_pull_image(self):
    """Service.pull_image() must record the host's manager status correctly.

    Each case feeds a sequence of command results to the mocked
    fabricio.run; only an unrecoverable fabricio.Error marks the host as
    a failed (False) manager candidate — plain failed results are ignored.
    """
    cases = dict(
        no_errors=dict(
            side_effect=[
                SucceededResult(),
                SucceededResult(),
                SucceededResult(),
            ],
            expected_manager_status=None,
        ),
        ignored_errors=dict(
            # FailedResult is tolerated; the host stays undecided (None)
            side_effect=[
                FailedResult(),
                SucceededResult(),
                FailedResult(),
            ],
            expected_manager_status=None,
        ),
        errors=dict(
            # fabricio.Error marks the host as a failed manager candidate
            side_effect=[
                FailedResult(),
                fabricio.Error(),
            ],
            expected_manager_status=False,
        ),
    )
    for case, test_data in cases.items():
        with self.subTest(case=case):
            service = docker.Service(name='service', image='image')
            with mock.patch.object(
                fabricio,
                'run',
                side_effect=test_data['side_effect']
            ):
                service.pull_image()
                self.assertEqual(
                    test_data['expected_manager_status'],
                    service.managers.get(fab.env.host),
                )
def test_update_options(self):
cases = dict(
default=dict(
init_kwargs=dict(name='name'),
service_info=dict(),
expected={
'args': '',
},
),
empty_args=dict(
init_kwargs=dict(name='name', command='', args=''),
service_info=dict(),
expected={
'args': '',
},
),
command=dict(
init_kwargs=dict(name='name', command='command'),
service_info=dict(),
expected={
'args': 'command',
},
),
args_and_command=dict(
init_kwargs=dict(name='name', command='command', args='arg1 arg2'),
service_info=dict(),
expected={
'args': 'command arg1 arg2',
},
),
new_option_value=dict(
init_kwargs=dict(
name='service',
args='arg1 "arg2" \'arg3\'',
options=dict(
publish='80:80',
mount='type=volume,destination=/path',
label='label=value',
env='FOO=bar',
constraint='node.role == manager',
container_label='label=value',
network='network',
restart_condition='on-failure',
stop_grace_period=20,
custom_option='custom_value',
replicas=3,
user='user',
host='foo:127.0.0.2',
secret='source=secret,target=/secret2',
config='config',
group='42',
placement_pref='spread=node.role',
dns='8.8.8.8',
dns_option='option',
dns_search='domain',
),
mode='mode',
),
service_info=dict(),
expected={
'env-add': ['FOO=bar'],
'constraint-add': ['node.role == manager'],
'publish-add': ['80:80'],
'label-add': ['label=value'],
'args': 'arg1 "arg2" \'arg3\'',
'user': 'user',
'replicas': 3,
'mount-add': ['type=volume,destination=/path'],
'network-add': ['network'],
'stop-grace-period': 20,
'restart-condition': 'on-failure',
'custom_option': 'custom_value',
'container-label-add': ['label=value'],
'host-add': ['foo:127.0.0.2'],
'secret-add': ['source=secret,target=/secret2'],
'config-add': ['config'],
'group-add': ['42'],
'placement-pref-add': ['spread=node.role'],
'dns-add': ['8.8.8.8'],
'dns-option-add': ['option'],
'dns-search-add': ['domain'],
},
),
new_option_value_with_custom_name=dict(
init_kwargs=dict(
name='service',
args='arg1 "arg2" \'arg3\'',
options={
'container-label': 'label=value',
'restart-condition': 'on-failure',
'stop-grace-period': 20,
'placement-pref': 'spread=node.role',
'dns-option': 'option',
'dns-search': 'domain',
},
mode='mode',
),
service_info=dict(),
expected={
'args': 'arg1 "arg2" \'arg3\'',
'stop-grace-period': 20,
'restart-condition': 'on-failure',
'container-label-add': ['label=value'],
'placement-pref-add': ['spread=node.role'],
'dns-option-add': ['option'],
'dns-search-add': ['domain'],
},
),
changed_option_value=dict(
init_kwargs=dict(
name='service',
args='arg1 "arg2" \'arg3\'',
options=dict(
publish=['8000:80', 81, '82'],
mount=[
'type=new_type,destination=/path',
'type=new_type,dst=/path2',
'type=new_type,target=/path3',
],
label='label=new_value',
env='FOO=baz',
constraint='node.role == worker',
container_label='label=container_new_value',
network='new_network',
restart_condition='any',
stop_grace_period=20,
custom_option='new_custom_value',
replicas=2,
user='new_user',
host='foo:127.0.0.2',
secret='source=secret,target=/secret2',
config='source=config,target=/config2',
group='new',
placement_pref='spread=new',
dns='new',
dns_option='new',
dns_search='new',
),
mode='mode',
),
service_info=dict(
Spec=dict(
Labels=dict(
label='value',
),
TaskTemplate=dict(
ContainerSpec=dict(
Labels=dict(
label='value',
),
Env=[
'FOO=bar',
],
Mounts=[
dict(
Type='volume',
Source='/source',
Target='/path',
),
dict(
Type='bind',
Source='/source2',
Target='/path2',
),
dict(
Type='volume',
Source='/source3',
Target='/path3',
),
],
Hosts=[
"127.0.0.1 foo",
],
Secrets=[
dict(
File=dict(
Name='/secret1',
),
SecretID='secret',
SecretName='secret',
),
],
Configs=[
dict(
File=dict(
Name='/config1',
),
ConfigID='config',
ConfigName='config',
),
],
Groups=[
'old',
],
DNSConfig=dict(
Nameservers=[
'old',
],
Options=[
'old',
],
Search=[
'old',
],
),
),
Placement=dict(
Constraints=[
'node.role == manager',
],
Preferences=[
dict(
Spread=dict(
SpreadDescriptor='old',
),
),
],
),
Networks=[
{
'Target': 'old_network_id',
},
],
),
EndpointSpec=dict(
Ports=[
dict(
TargetPort=80,
Protocol='tcp',
PublishedPort=8080,
),
dict(
TargetPort=81,
Protocol='tcp',
PublishedPort=8081,
),
dict(
TargetPort=82,
Protocol='udp',
PublishedPort=8082,
),
],
),
),
),
expected={
'env-add': ['FOO=baz'],
'constraint-add': ['node.role == worker'],
'label-add': ['label=new_value'],
'args': 'arg1 "arg2" \'arg3\'',
'user': 'new_user',
'replicas': 2,
'mount-add': [
'type=new_type,destination=/path',
'type=new_type,dst=/path2',
'type=new_type,target=/path3',
],
'network-add': ['new_network'],
'network-rm': ['old_network_id'],
'publish-add': ['8000:80', 81, '82'],
'constraint-rm': ['node.role == manager'],
'stop-grace-period': 20,
'restart-condition': 'any',
'custom_option': 'new_custom_value',
'container-label-add': ['label=container_new_value'],
'host-add': ['foo:127.0.0.2'],
'host-rm': ['foo:127.0.0.1'],
'secret-add': ['source=secret,target=/secret2'],
'secret-rm': ['secret'],
'config-add': ['source=config,target=/config2'],
'config-rm': ['config'],
'group-add': ['new'],
'group-rm': ['old'],
'dns-add': ['new'],
'dns-rm': ['old'],
'dns-option-add': ['new'],
'dns-option-rm': ['old'],
'dns-search-add': ['new'],
'dns-search-rm': ['old'],
'placement-pref-add': ['spread=new'],
'placement-pref-rm': ['spread=old'],
},
),
changed_option_value_with_custom_name=dict(
init_kwargs=dict(
name='service',
args='arg1 "arg2" \'arg3\'',
options={
'container-label': 'label=container_new_value',
'restart-condition': 'any',
'stop-grace-period': 20,
'placement-pref': 'spread=new',
'dns-option': 'new',
'dns-search': 'new',
},
mode='mode',
),
service_info=dict(
Spec=dict(
TaskTemplate=dict(
ContainerSpec=dict(
Labels=dict(
label='value',
),
DNSConfig=dict(
Options=[
'old',
],
Search=[
'old',
],
),
),
Placement=dict(
Preferences=[
dict(
Spread=dict(
SpreadDescriptor='old',
),
),
],
),
),
),
),
expected={
'args': 'arg1 "arg2" \'arg3\'',
'stop-grace-period': 20,
'restart-condition': 'any',
'container-label-add': ['label=container_new_value'],
'dns-option-add': ['new'],
'dns-option-rm': ['old'],
'dns-search-add': ['new'],
'dns-search-rm': ['old'],
'placement-pref-add': ['spread=new'],
'placement-pref-rm': ['spread=old'],
},
),
no_changes=dict(
init_kwargs=dict(
name='service',
args='arg1 "arg2" \'arg3\'',
options=dict(
publish='8080:80',
mount='type=type,destination=/path',
label='label=value',
env='FOO=bar',
constraint='node.role == manager',
container_label='label=value',
network='network',
restart_condition='any',
stop_grace_period=20,
custom_option='new_custom_value',
replicas=2,
user='user',
host='foo:127.0.0.1',
secret='source=secret,target=/secret',
config='config',
group='old',
placement_pref='spread=old',
dns='old',
dns_option='old',
dns_search='old',
),
mode='mode',
),
service_info=dict(
Spec=dict(
Labels=dict(
label='value',
),
TaskTemplate=dict(
ContainerSpec=dict(
Labels=dict(
label='value',
),
Env=[
'FOO=bar',
],
Mounts=[
dict(
Type='volume',
Source='/source',
Target='/path',
),
],
Hosts=[
"127.0.0.1 foo",
],
Secrets=[
dict(
File=dict(
Name='/secret',
),
SecretID='secret',
SecretName='secret',
),
],
Configs=[
dict(
File=dict(
Name='/config',
),
ConfigID='config',
ConfigName='config',
),
],
Groups=[
'old',
],
DNSConfig=dict(
Nameservers=[
'old',
],
Options=[
'old',
],
Search=[
'old',
],
),
),
Placement=dict(
Constraints=[
'node.role == manager',
],
Preferences=[
dict(
Spread=dict(
SpreadDescriptor='old',
),
),
],
),
Networks=[
{
'Target': 'network_id',
},
],
),
EndpointSpec=dict(
Ports=[
dict(
TargetPort=80,
Protocol='tcp',
PublishedPort=8080,
),
],
),
),
),
expected={
'container-label-add': ['label=value'],
'env-add': ['FOO=bar'],
'label-add': ['label=value'],
'publish-add': ['8080:80'],
'args': 'arg1 "arg2" \'arg3\'',
'user': 'user',
'replicas': 2,
'mount-add': ['type=type,destination=/path'],
'network-add': ['network'],
'network-rm': ['network_id'],
'stop-grace-period': 20,
'restart-condition': 'any',
'custom_option': 'new_custom_value',
'secret-add': ['source=secret,target=/secret'],
'secret-rm': ['secret'],
'config-add': ['config'],
'config-rm': ['config'],
'placement-pref-add': ['spread=old'],
'placement-pref-rm': ['spread=old'],
},
),
no_changes_with_custom_name=dict(
init_kwargs=dict(
name='service',
args='arg1 "arg2" \'arg3\'',
options={
'container-label': 'label=value',
'restart-condition': 'any',
'stop-grace-period': 20,
'placement-pref': 'spread=old',
'dns-option': 'old',
'dns-search': 'old',
},
mode='mode',
),
service_info=dict(
Spec=dict(
TaskTemplate=dict(
ContainerSpec=dict(
Labels=dict(
label='value',
),
DNSConfig=dict(
Options=[
'old',
],
Search=[
'old',
],
),
),
Placement=dict(
Preferences=[
dict(
Spread=dict(
SpreadDescriptor='old',
),
),
],
),
),
),
),
expected={
'container-label-add': ['label=value'],
'args': 'arg1 "arg2" \'arg3\'',
'stop-grace-period': 20,
'restart-condition': 'any',
'placement-pref-add': ['spread=old'],
'placement-pref-rm': ['spread=old'],
},
),
no_changes_callable=dict(
init_kwargs=dict(
name='service',
args='arg1 "arg2" \'arg3\'',
options=dict(
publish=lambda service: '8080:80',
mount=lambda service: 'type=type,destination=/path',
label=lambda service: 'label=value',
env=lambda service: 'FOO=bar',
constraint=lambda service: 'node.role == manager',
container_label=lambda service: 'label=value',
network=lambda service: 'network',
restart_condition=lambda service: 'any',
stop_grace_period=lambda service: 20,
custom_option=lambda service: 'new_custom_value',
replicas=lambda service: 2,
user=lambda service: 'user',
host=lambda service: 'foo:127.0.0.1',
secret=lambda service: 'source=secret,target=/secret',
config=lambda service: 'config',
group=lambda service: 'old',
placement_pref=lambda service: 'spread=old',
dns=lambda service: 'old',
dns_option=lambda service: 'old',
dns_search=lambda service: 'old',
),
mode='mode',
),
service_info=dict(
Spec=dict(
Labels=dict(
label='value',
),
TaskTemplate=dict(
ContainerSpec=dict(
Labels=dict(
label='value',
),
Env=[
'FOO=bar',
],
Mounts=[
dict(
Type='volume',
Source='/source',
Target='/path',
),
],
Hosts=[
"127.0.0.1 foo",
],
Secrets=[
dict(
File=dict(
Name='/secret',
),
SecretID='secret',
SecretName='secret',
),
],
Configs=[
dict(
File=dict(
Name='/config',
),
ConfigID='config',
ConfigName='config',
),
],
Groups=[
'old',
],
DNSConfig=dict(
Nameservers=[
'old',
],
Options=[
'old',
],
Search=[
'old',
],
),
),
Placement=dict(
Constraints=[
'node.role == manager',
],
Preferences=[
dict(
Spread=dict(
SpreadDescriptor='old',
),
),
],
),
Networks=[
{
'Target': 'network_id',
},
],
),
EndpointSpec=dict(
Ports=[
dict(
TargetPort=80,
Protocol='tcp',
PublishedPort=8080,
),
],
),
),
),
expected={
'container-label-add': ['label=value'],
'env-add': ['FOO=bar'],
'label-add': ['label=value'],
'publish-add': ['8080:80'],
'args': 'arg1 "arg2" \'arg3\'',
'user': 'user',
'replicas': 2,
'mount-add': ['type=type,destination=/path'],
'network-add': ['network'],
'network-rm': ['network_id'],
'stop-grace-period': 20,
'restart-condition': 'any',
'custom_option': 'new_custom_value',
'secret-add': ['source=secret,target=/secret'],
'secret-rm': ['secret'],
'config-add': ['config'],
'config-rm': ['config'],
'placement-pref-add': ['spread=old'],
'placement-pref-rm': ['spread=old'],
},
),
no_changes_callable_with_custom_name=dict(
init_kwargs=dict(
name='service',
args='arg1 "arg2" \'arg3\'',
options={
'container-label': lambda service: 'label=value',
'restart-condition': lambda service: 'any',
'stop-grace-period': lambda service: 20,
'placement-pref': lambda service: 'spread=old',
'dns-option': lambda service: 'old',
'dns-search': lambda service: 'old',
},
mode='mode',
),
service_info=dict(
Spec=dict(
TaskTemplate=dict(
ContainerSpec=dict(
Labels=dict(
label='value',
),
DNSConfig=dict(
Options=[
'old',
],
Search=[
'old',
],
),
),
Placement=dict(
Preferences=[
dict(
Spread=dict(
SpreadDescriptor='old',
),
),
],
),
),
),
),
expected={
'container-label-add': ['label=value'],
'args': 'arg1 "arg2" \'arg3\'',
'stop-grace-period': 20,
'restart-condition': 'any',
'placement-pref-add': ['spread=old'],
'placement-pref-rm': ['spread=old'],
},
),
new_options_values=dict(
init_kwargs=dict(
name='service',
options=dict(
publish=[
'80:80',
'81:81',
],
mount=[
'type=volume,destination=/path',
'type=volume,destination="/path2"',
],
label=[
'label=value',
'label2=value2',
],
container_label=[
'label=value',
'label2=value2',
],
constraint=[
'node.role == manager',
'node.role == worker',
],
env=[
'FOO=bar',
'FOO2=bar2',
],
network=[
'network1',
'network2',
],
host=[
'foo:127.0.0.2',
'bar:127.0.0.3',
],
secret=[
'secret',
'source=secret,target=/secret2',
],
config=[
'config',
'source=config,target=/config2',
],
group=['group1', 'group2'],
placement_pref=['spread=spread1', 'spread=spread2,foo=bar'],
dns=['dns1', 'dns2'],
dns_option=['option1', 'option2'],
dns_search=['domain1', 'domain2'],
),
),
service_info=dict(),
expected={
'env-add': ['FOO=bar', 'FOO2=bar2'],
'constraint-add': ['node.role == manager', 'node.role == worker'],
'publish-add': ['80:80', '81:81'],
'label-add': ['label=value', 'label2=value2'],
'mount-add': ['type=volume,destination=/path', 'type=volume,destination="/path2"'],
'container-label-add': ['label=value', 'label2=value2'],
'network-add': ['network1', 'network2'],
'args': '',
'host-add': ['foo:127.0.0.2', 'bar:127.0.0.3'],
'secret-add': ['secret', 'source=secret,target=/secret2'],
'config-add': ['config', 'source=config,target=/config2'],
'group-add': ['group1', 'group2'],
'dns-add': ['dns1', 'dns2'],
'dns-option-add': ['option1', 'option2'],
'dns-search-add': ['domain1', 'domain2'],
'placement-pref-add': ['spread=spread1', 'spread=spread2,foo=bar'],
},
),
new_options_values_with_custom_name=dict(
init_kwargs=dict(
name='service',
options={
'container-label': [
'label=value',
'label2=value2',
],
'placement-pref': ['spread=spread1', 'spread=spread2,foo=bar'],
'dns-option': ['option1', 'option2'],
'dns-search': ['domain1', 'domain2'],
},
),
service_info=dict(),
expected={
'container-label-add': ['label=value', 'label2=value2'],
'args': '',
'dns-option-add': ['option1', 'option2'],
'dns-search-add': ['domain1', 'domain2'],
'placement-pref-add': ['spread=spread1', 'spread=spread2,foo=bar'],
},
),
remove_option_value=dict(
init_kwargs=dict(
name='service',
),
service_info=dict(
Spec=dict(
Labels=dict(
label='value',
),
TaskTemplate=dict(
ContainerSpec=dict(
Labels=dict(
label='value',
),
Env=[
'FOO=bar',
],
Mounts=[
dict(
Type='volume',
Source='/source',
Target='/path',
),
],
Hosts=[
"127.0.0.1 foo",
],
Secrets=[
dict(
File=dict(
Name='/secret1',
),
SecretID='secret1',
SecretName='secret1',
),
],
Configs=[
dict(
File=dict(
Name='/config',
),
ConfigID='config',
ConfigName='config',
),
],
Groups=[
'old',
],
DNSConfig=dict(
Nameservers=[
'old',
],
Options=[
'old',
],
Search=[
'old',
],
),
),
Placement=dict(
Constraints=[
'node.role == manager',
],
Preferences=[
dict(
Spread=dict(
SpreadDescriptor='old',
),
),
],
),
Networks=[
{
'Target': 'old_network_id',
},
],
),
EndpointSpec=dict(
Ports=[
dict(
TargetPort=80,
Protocol='tcp',
PublishedPort=80,
),
],
),
),
),
expected={
'args': '',
'label-rm': ['label'],
'env-rm': ['FOO'],
'mount-rm': ['/path'],
'container-label-rm': ['label'],
'publish-rm': [80],
'network-rm': ['old_network_id'],
'constraint-rm': ['node.role == manager'],
'host-rm': ['foo:127.0.0.1'],
'secret-rm': ['secret1'],
'config-rm': ['config'],
'group-rm': ['old'],
'dns-rm': ['old'],
'dns-option-rm': ['old'],
'dns-search-rm': ['old'],
'placement-pref-rm': ['spread=old'],
},
),
remove_option_value_with_custom_name=dict(
init_kwargs=dict(
name='service',
),
service_info=dict(
Spec=dict(
TaskTemplate=dict(
ContainerSpec=dict(
Labels=dict(
label='value',
),
DNSConfig=dict(
Options=[
'old',
],
Search=[
'old',
],
),
),
Placement=dict(
Preferences=[
dict(
Spread=dict(
SpreadDescriptor='old',
),
),
],
),
),
),
),
expected={
'args': '',
'container-label-rm': ['label'],
'dns-option-rm': ['old'],
'dns-search-rm': ['old'],
'placement-pref-rm': ['spread=old'],
},
),
remove_single_option_value_from_two=dict(
init_kwargs=dict(
name='service',
options=dict(
publish='80:80',
mount='type=volume,destination=/path',
label='label=value',
env='FOO=bar',
constraint='node.role == manager',
container_label='label=value',
network='network2',
host='foo:127.0.0.1',
secret='source=secret1,target=/secret1',
config='config',
group='new',
placement_pref='spread=new',
dns='new',
dns_option='new',
dns_search='new',
),
),
service_info=dict(
Spec=dict(
Labels=OrderedDict([
('label', 'value'),
('label2', 'value2'),
]),
TaskTemplate=dict(
ContainerSpec=dict(
Labels=OrderedDict([
('label', 'value'),
('label2', 'value2'),
]),
Env=[
'FOO=bar',
'FOO2=bar2',
],
Mounts=[
dict(
Type='volume',
Source='/source',
Target='/path',
),
dict(
Type='volume',
Source='/source2',
Target='/path2',
),
],
Hosts=[
"127.0.0.1 foo",
"127.0.0.1 bar",
],
Secrets=[
dict(
File=dict(
Name='/secret1',
),
SecretID='secret1',
SecretName='secret1',
),
dict(
File=dict(
Name='/secret2',
),
SecretID='secret2',
SecretName='secret2',
),
],
Configs=[
dict(
File=dict(
Name='/config',
),
ConfigID='config',
ConfigName='config',
),
dict(
File=dict(
Name='/config2',
),
ConfigID='config2',
ConfigName='config2',
),
],
Groups=[
'new',
'old',
],
DNSConfig=dict(
Nameservers=[
'new',
'old',
],
Options=[
'new',
'old',
],
Search=[
'new',
'old',
],
),
),
Placement=dict(
Constraints=[
'node.role == manager',
'node.role == worker',
],
Preferences=[
dict(
Spread=dict(
SpreadDescriptor='new',
),
),
OrderedDict((
('Spread', dict(
SpreadDescriptor='old',
)),
('Foo', dict(
FooDescriptor='old',
)),
)),
],
),
Networks=[
{
'Target': 'network1_id',
},
{
'Target': 'network2_id',
},
],
),
EndpointSpec=dict(
Ports=[
dict(
TargetPort=80,
Protocol='tcp',
PublishedPort=80,
),
dict(
TargetPort=81,
Protocol='tcp',
PublishedPort=81,
),
],
),
),
),
expected={
'container-label-add': ['label=value'],
'container-label-rm': ['label2'],
'env-add': ['FOO=bar'],
'env-rm': ['FOO2'],
'label-add': ['label=value'],
'label-rm': ['label2'],
'args': '',
'publish-add': ['80:80'],
'publish-rm': [81],
'mount-rm': ['/path2'],
'network-add': ['network2'],
'network-rm': ['network1_id', 'network2_id'],
'mount-add': ['type=volume,destination=/path'],
'constraint-rm': ['node.role == worker'],
'host-rm': ['bar:127.0.0.1'],
'secret-add': ['source=secret1,target=/secret1'],
'secret-rm': ['secret1', 'secret2'],
'config-add': ['config'],
'config-rm': ['config', 'config2'],
'group-rm': ['old'],
'dns-rm': ['old'],
'dns-option-rm': ['old'],
'dns-search-rm': ['old'],
'placement-pref-add': ['spread=new'],
'placement-pref-rm': ['spread=new', 'spread=old,foo=old'],
},
),
remove_single_option_value_from_two_with_custom_name=dict(
init_kwargs=dict(
name='service',
options={
'container-label': 'label=value',
'placement-pref': 'spread=new',
'dns-option': 'new',
'dns-search': 'new',
},
),
service_info=dict(
Spec=dict(
TaskTemplate=dict(
ContainerSpec=dict(
Labels=OrderedDict([
('label', 'value'),
('label2', 'value2'),
]),
DNSConfig=dict(
Options=[
'new',
'old',
],
Search=[
'new',
'old',
],
),
),
Placement=dict(
Preferences=[
dict(
Spread=dict(
SpreadDescriptor='new',
),
),
OrderedDict((
('Spread', dict(
SpreadDescriptor='old',
)),
('Foo', dict(
FooDescriptor='old',
)),
)),
],
),
),
),
),
expected={
'container-label-add': ['label=value'],
'container-label-rm': ['label2'],
'args': '',
'dns-option-rm': ['old'],
'dns-search-rm': ['old'],
'placement-pref-add': ['spread=new'],
'placement-pref-rm': ['spread=new', 'spread=old,foo=old'],
},
),
remove_single_option_value_from_three=dict(
init_kwargs=dict(
name='service',
options=dict(
publish='80-81:80-81/tcp',
mount=[
'type=volume,target=/path',
'type=volume,dst="/path2"',
],
label=[
'label=value',
'label2=value2',
],
env=[
'FOO=bar',
'FOO2=bar2',
],
constraint=[
'node.role == manager',
'node.role == worker',
],
container_label=[
'label=value',
'label2=value2',
],
network=[
'network1',
'network2',
],
host=[
'foo:127.0.0.1',
'bar:127.0.0.1',
],
secret=[
'secret',
'source=secret,target=/secret2',
],
config=[
'config1',
'config2',
],
group=['group1', 'group2'],
placement_pref=['spread=pref1', 'spread=pref2'],
dns=['dns1', 'dns2'],
dns_option=['option1', 'option2'],
dns_search=['domain1', 'domain2'],
),
),
service_info=dict(
Spec=dict(
Labels=OrderedDict([
('label', 'value'),
('label2', 'value2'),
('label3', 'value3'),
]),
TaskTemplate=dict(
ContainerSpec=dict(
Labels=OrderedDict([
('label', 'value'),
('label2', 'value2'),
('label3', 'value3'),
]),
Env=[
'FOO=bar',
'FOO2=bar2',
'FOO3=bar3',
],
Mounts=[
dict(
Type='volume',
Source='/source',
Target='/path',
),
dict(
Type='volume',
Source='/source2',
Target='/path2',
),
dict(
Type='volume',
Source='/source3',
Target='/path3',
),
],
Hosts=[
"127.0.0.1 foo",
"127.0.0.1 bar",
"127.0.0.1 baz",
],
Secrets=[
dict(
File=dict(
Name='secret',
),
SecretID='secret',
SecretName='secret',
),
dict(
File=dict(
Name='/secret2',
),
SecretID='secret',
SecretName='secret',
),
dict(
File=dict(
Name='/secret3',
),
SecretID='secret3',
SecretName='secret3',
),
],
Configs=[
dict(
File=dict(
Name='/config1',
),
ConfigID='config1',
ConfigName='config1',
),
dict(
File=dict(
Name='/config2',
),
ConfigID='config2',
ConfigName='config2',
),
dict(
File=dict(
Name='/config3',
),
ConfigID='config3',
ConfigName='config3',
),
],
Groups=[
'group1',
'group2',
'group3',
],
DNSConfig=dict(
Nameservers=[
'dns1',
'dns2',
'dns3',
],
Options=[
'option1',
'option2',
'option3',
],
Search=[
'domain1',
'domain2',
'domain3',
],
),
),
Placement=dict(
Constraints=[
'node.role == manager',
'node.role == worker',
'constraint',
],
Preferences=[
dict(
Spread=dict(
SpreadDescriptor='pref1',
),
),
dict(
Spread=dict(
SpreadDescriptor='pref2',
),
),
dict(
Spread=dict(
SpreadDescriptor='pref3',
),
),
],
),
Networks=[
{
'Target': 'network1_id',
},
{
'Target': 'network2_id',
},
{
'Target': 'network3_id',
},
],
),
EndpointSpec=dict(
Ports=[
dict(
TargetPort=80,
Protocol='tcp',
PublishedPort=80,
),
dict(
TargetPort=81,
Protocol='tcp',
PublishedPort=81,
),
dict(
TargetPort=82,
Protocol='tcp',
PublishedPort=82,
),
],
),
),
),
expected={
'container-label-add': ['label=value', 'label2=value2'],
'container-label-rm': ['label3'],
'env-add': ['FOO=bar', 'FOO2=bar2'],
'env-rm': ['FOO3'],
'label-add': ['label=value', 'label2=value2'],
'label-rm': ['label3'],
'args': '',
'publish-add': ['80-81:80-81/tcp'],
'publish-rm': [82],
'mount-rm': ['/path3'],
'mount-add': ['type=volume,target=/path', 'type=volume,dst="/path2"'],
'constraint-rm': ['constraint'],
'network-add': ['network1', 'network2'],
'network-rm': ['network1_id', 'network2_id', 'network3_id'],
'host-rm': ['baz:127.0.0.1'],
'secret-add': ['secret', 'source=secret,target=/secret2'],
'secret-rm': ['secret', 'secret3'],
'config-add': ['config1', 'config2'],
'config-rm': ['config1', 'config2', 'config3'],
'group-rm': ['group3'],
'dns-rm': ['dns3'],
'dns-option-rm': ['option3'],
'dns-search-rm': ['domain3'],
'placement-pref-add': ['spread=pref1', 'spread=pref2'],
'placement-pref-rm': ['spread=pref1', 'spread=pref2', 'spread=pref3'],
},
),
remove_single_option_value_from_three_with_custom_name=dict(
init_kwargs=dict(
name='service',
options={
'container_label': ['label=value', 'label2=value2'],
'placement_pref': ['spread=pref1', 'spread=pref2'],
'dns_option': ['option1', 'option2'],
'dns_search': ['domain1', 'domain2'],
},
),
service_info=dict(
Spec=dict(
TaskTemplate=dict(
ContainerSpec=dict(
Labels=OrderedDict([
('label', 'value'),
('label2', 'value2'),
('label3', 'value3'),
]),
DNSConfig=dict(
Options=[
'option1',
'option2',
'option3',
],
Search=[
'domain1',
'domain2',
'domain3',
],
),
),
Placement=dict(
Preferences=[
dict(
Spread=dict(
SpreadDescriptor='pref1',
),
),
dict(
Spread=dict(
SpreadDescriptor='pref2',
),
),
dict(
Spread=dict(
SpreadDescriptor='pref3',
),
),
],
),
),
),
),
expected={
'container-label-add': ['label=value', 'label2=value2'],
'container-label-rm': ['label3'],
'args': '',
'dns-option-rm': ['option3'],
'dns-search-rm': ['domain3'],
'placement-pref-add': ['spread=pref1', 'spread=pref2'],
'placement-pref-rm': ['spread=pref1', 'spread=pref2', 'spread=pref3'],
},
),
)
for case, data in cases.items():
with self.subTest(case=case):
with mock.patch.object(
docker.Service,
'info',
new_callable=mock.PropertyMock,
return_value=data['service_info'],
__delete__=lambda *_: None,
):
service = docker.Service(**data['init_kwargs'])
self.assertDictEqual(
dict(service.update_options),
data['expected'],
)
def test__update_labels(self):
    """``_update_labels`` must merge new labels into a fresh container,
    leaving the original ``label`` collection object untouched.
    """
    cases = {
        'empty': {
            'service_init_kwargs': {},
            'kwargs': {},
            'expected_service_labels': [],
        },
        'existing_label_str': {
            'service_init_kwargs': {'options': {'label': 'label=label'}},
            'kwargs': {},
            'expected_service_labels': ['label=label'],
        },
        'existing_label_list': {
            'service_init_kwargs': {'options': {'label': ['label1=label1', 'label2=label2']}},
            'kwargs': {},
            'expected_service_labels': ['label1=label1', 'label2=label2'],
        },
        'existing_label_tuple': {
            'service_init_kwargs': {'options': {'label': ('label1=label1', 'label2=label2')}},
            'kwargs': {},
            'expected_service_labels': ['label1=label1', 'label2=label2'],
        },
        'existing_label_str_add_json': {
            'service_init_kwargs': {'options': {'label': 'label=label'}},
            'kwargs': {'new_label': '{"foo": "bar"}'},
            'expected_service_labels': ['label=label', 'new_label={"foo": "bar"}'],
        },
        'existing_label_str_add_one': {
            'service_init_kwargs': {'options': {'label': 'label=label'}},
            'kwargs': {'new_label': 'new_label'},
            'expected_service_labels': ['label=label', 'new_label=new_label'],
        },
        'existing_label_object_add_one': {
            'service_init_kwargs': {'options': {'label': 42}},
            'kwargs': {'new_label': 'new_label'},
            'expected_service_labels': ['42', 'new_label=new_label'],
        },
        'existing_label_list_add_one': {
            'service_init_kwargs': {'options': {'label': ['label1=label1', 'label2=label2']}},
            'kwargs': {'new_label': 'new_label'},
            'expected_service_labels': ['label1=label1', 'label2=label2', 'new_label=new_label'],
        },
        'existing_label_tuple_add_one': {
            'service_init_kwargs': {'options': {'label': ('label1=label1', 'label2=label2')}},
            'kwargs': {'new_label': 'new_label'},
            'expected_service_labels': ['label1=label1', 'label2=label2', 'new_label=new_label'],
        },
        'existing_label_str_add_two': {
            'service_init_kwargs': {'options': {'label': 'label=label'}},
            'kwargs': {'new_label1': 'new_label1', 'new_label2': 'new_label2'},
            'expected_service_labels': ['label=label', 'new_label1=new_label1', 'new_label2=new_label2'],
        },
        'existing_label_object_add_two': {
            'service_init_kwargs': {'options': {'label': 42}},
            'kwargs': {'new_label1': 'new_label1', 'new_label2': 'new_label2'},
            'expected_service_labels': ['42', 'new_label1=new_label1', 'new_label2=new_label2'],
        },
        'existing_label_list_add_two': {
            'service_init_kwargs': {'options': {'label': ['label1=label1', 'label2=label2']}},
            'kwargs': {'new_label1': 'new_label1', 'new_label2': 'new_label2'},
            'expected_service_labels': ['label1=label1', 'label2=label2', 'new_label1=new_label1', 'new_label2=new_label2'],
        },
        'existing_label_tuple_add_two': {
            'service_init_kwargs': {'options': {'label': ('label1=label1', 'label2=label2')}},
            'kwargs': {'new_label1': 'new_label1', 'new_label2': 'new_label2'},
            'expected_service_labels': ['label1=label1', 'label2=label2', 'new_label1=new_label1', 'new_label2=new_label2'],
        },
    }
    for case_name, params in cases.items():
        with self.subTest(case=case_name):
            service = docker.Service(**params['service_init_kwargs'])
            labels_id_before = id(service.label)
            service._update_labels(params['kwargs'])
            expected_labels = params['expected_service_labels']
            # every expected label is present, and nothing extra crept in
            for expected_label in expected_labels:
                self.assertIn(expected_label, service.label)
            self.assertEqual(len(service.label), len(expected_labels))
            # the labels container must be replaced, not mutated in place,
            # so the original service definition stays unchanged
            self.assertNotEqual(id(service.label), labels_id_before)
def test__create_service(self):
    """``_create_service`` must emit a ``docker service create`` command
    whose parsed options exactly match the expectations for each case.
    """
    cases = {
        'default': {
            'service_init_kwargs': {'name': 'service'},
            'expected_args': {
                'executable': ['docker', 'service', 'create'],
                'image': ['image:tag'],
                'name': 'service',
                'args': [],
            },
        },
        # the image passed to _create_service wins over the init-time image
        'custom_image': {
            'service_init_kwargs': {'name': 'service', 'image': 'custom'},
            'expected_args': {
                'executable': ['docker', 'service', 'create'],
                'image': ['image:tag'],
                'name': 'service',
                'args': [],
            },
        },
        'custom_command': {
            'service_init_kwargs': {'name': 'service', 'command': 'command'},
            'expected_args': {
                'executable': ['docker', 'service', 'create'],
                'image': ['image:tag'],
                'name': 'service',
                'args': ['command'],
            },
        },
        'custom_args': {
            'service_init_kwargs': {'name': 'service', 'args': 'arg1 arg2'},
            'expected_args': {
                'executable': ['docker', 'service', 'create'],
                'image': ['image:tag'],
                'name': 'service',
                'args': ['arg1', 'arg2'],
            },
        },
        'custom_command_and_args': {
            'service_init_kwargs': {
                'name': 'service',
                'command': 'command',
                'args': 'arg1 arg2',
            },
            'expected_args': {
                'executable': ['docker', 'service', 'create'],
                'image': ['image:tag'],
                'name': 'service',
                'args': ['command', 'arg1', 'arg2'],
            },
        },
        'complex': {
            'service_init_kwargs': {
                'name': 'service',
                'command': 'command1 command2',
                'args': 'arg1 arg2',
                'options': {
                    'mount': ['mount1', 'mount2'],
                    'constraint': ['constraint1', 'constraint2'],
                    'container_label': ['c_label1', 'c_label2'],
                    'label': ['label1', 'label2'],
                    'env': ['en1', 'env2'],
                    'publish': ['port1', 'port2'],
                    'replicas': 5,
                    'network': 'network',
                    'restart_condition': 'restart_condition',
                    'user': 'user',
                    'stop_grace_period': 20,
                    'host': ['foo:127.0.0.1', 'bar:127.0.0.1'],
                    'secret': ['secret1', 'secret2'],
                    'config': ['config1', 'config2'],
                    'group': ['group1', 'group2'],
                    'placement_pref': ['pref1', 'pref2'],
                    'dns': ['dns1', 'dns2'],
                    'dns_option': ['dns-option1', 'dns-option2'],
                    'dns_search': ['domain1', 'domain2'],
                },
                'mode': 'mode',
            },
            'expected_args': {
                'executable': ['docker', 'service', 'create'],
                'args': ['command1', 'command2', 'arg1', 'arg2'],
                'network': 'network',
                'mode': 'mode',
                'constraint': ['constraint1', 'constraint2'],
                'mount': ['mount1', 'mount2'],
                'replicas': '5',
                'publish': ['port1', 'port2'],
                'label': ['label1', 'label2'],
                'container-label': ['c_label1', 'c_label2'],
                'user': 'user',
                'env': ['en1', 'env2'],
                'host': ['foo:127.0.0.1', 'bar:127.0.0.1'],
                'secret': ['secret1', 'secret2'],
                'config': ['config1', 'config2'],
                'group': ['group1', 'group2'],
                'placement-pref': ['pref1', 'pref2'],
                'dns': ['dns1', 'dns2'],
                'dns-option': ['dns-option1', 'dns-option2'],
                'dns-search': ['domain1', 'domain2'],
                'stop-grace-period': '20',
                'restart-condition': 'restart_condition',
                'image': ['image:tag'],
                'name': 'service',
            },
        },
    }
    image = docker.Image('image:tag')
    for case_name, params in cases.items():
        with self.subTest(case=case_name):
            expected_args = params['expected_args']

            def check_command(command, *args, **kwargs):
                # split the generated CLI string and compare the parsed
                # options against this case's expectations
                parsed = docker_service_create_args_parser.parse_args(
                    shlex.split(command),
                )
                self.assertDictEqual(vars(parsed), expected_args)

            service = docker.Service(**params['service_init_kwargs'])
            with mock.patch.object(
                fabricio,
                'run',
                side_effect=check_command,
            ) as run:
                service._create_service(image)
                run.assert_called_once()
| 42.750743
| 154
| 0.379161
| 11,770
| 172,713
| 5.405098
| 0.038658
| 0.028294
| 0.026188
| 0.027728
| 0.827769
| 0.791396
| 0.741913
| 0.704848
| 0.659562
| 0.625546
| 0
| 0.01982
| 0.513025
| 172,713
| 4,039
| 155
| 42.761327
| 0.736574
| 0.014017
| 0
| 0.755601
| 0
| 0
| 0.157514
| 0.021866
| 0
| 0
| 0
| 0.000248
| 0.036405
| 1
| 0.014002
| false
| 0
| 0.002546
| 0.003055
| 0.023422
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c2658d10dfa4dad642c3a4c1c0c4aad98986e5e9
| 10,968
|
py
|
Python
|
tests/test_multipart_render.py
|
jframos/sdklib
|
0cc1126e94b823fad6cc47e6a00549cad6d2f771
|
[
"BSD-2-Clause"
] | 3
|
2016-12-15T15:54:37.000Z
|
2021-08-10T03:16:18.000Z
|
tests/test_multipart_render.py
|
jframos/sdklib
|
0cc1126e94b823fad6cc47e6a00549cad6d2f771
|
[
"BSD-2-Clause"
] | 44
|
2016-04-13T08:19:45.000Z
|
2022-01-14T12:58:44.000Z
|
tests/test_multipart_render.py
|
jframos/sdklib
|
0cc1126e94b823fad6cc47e6a00549cad6d2f771
|
[
"BSD-2-Clause"
] | 5
|
2016-11-22T11:23:28.000Z
|
2020-01-28T12:26:10.000Z
|
import unittest
from sdklib.http.renderers import MultiPartRenderer
from sdklib.util.files import guess_filename_stream
class TestMultiPartRender(unittest.TestCase):
    """Tests for ``MultiPartRenderer.encode_params``.

    Covers the supported data/files combinations: plain values, 2/3/4-tuple
    file parameters, missing data or files, custom boundaries (at init time
    and per call), and boolean/None serialization in both default and
    ``csharp`` output styles.
    """

    # Boundary the renderer falls back to when none is supplied either at
    # construction time or as an ``encode_params`` keyword.
    DEFAULT_CONTENT_TYPE = "multipart/form-data; boundary=----------ThIs_Is_tHe_bouNdaRY"

    def _assert_in_body(self, body, *fragments):
        """Assert every byte ``fragment`` occurs somewhere in ``body``."""
        for fragment in fragments:
            self.assertIn(fragment, body)

    def _assert_params(self, body):
        """Assert the two standard form parameters were rendered."""
        self._assert_in_body(body, b"param1", b"value1", b"param2", b"value2")

    def _assert_pdf_part(self, body):
        """Assert the PDF upload part (field, filename, content type) is present."""
        self._assert_in_body(
            body, b"file_upload", b"file.pdf", b"Content-Type: application/pdf")

    def test_encode_multipart_data_files(self):
        files = {"file_upload": "tests/resources/file.pdf", "file_upload2": "tests/resources/file.png"}
        data = {"param1": "value1", "param2": "value2"}
        body, content_type = MultiPartRenderer().encode_params(data, files)
        self.assertEqual(content_type, self.DEFAULT_CONTENT_TYPE)
        self._assert_params(body)
        self._assert_pdf_part(body)
        self._assert_in_body(
            body, b"file_upload2", b"file.png", b"Content-Type: image/png")

    def test_encode_multipart_data_as_2tuple_files(self):
        files = {"file_upload": "tests/resources/file.pdf", "file_upload2": "tests/resources/file.png"}
        data = {"param1": ("value1", "myContentType"), "param2": "value2"}
        body, content_type = MultiPartRenderer().encode_params(data, files)
        self.assertEqual(content_type, self.DEFAULT_CONTENT_TYPE)
        # a 2-tuple data value renders as (value, explicit part content type)
        self._assert_in_body(
            body,
            b'------------ThIs_Is_tHe_bouNdaRY\r\nContent-Disposition: form-data; name="param1"\r\nContent-Type: myContentType\r\n\r\nvalue1\r\n',
            b'------------ThIs_Is_tHe_bouNdaRY\r\nContent-Disposition: form-data; name="param2"\r\n\r\nvalue2\r\n',
        )
        self._assert_pdf_part(body)
        self._assert_in_body(
            body, b"file_upload2", b"file.png", b"Content-Type: image/png")

    def test_encode_multipart_data_files_as_2tuple_parameter(self):
        filename, stream = guess_filename_stream("tests/resources/file.pdf")
        filename2, stream2 = guess_filename_stream("tests/resources/file.png")
        files = {"file_upload1": (filename, stream), "file_upload2": (filename2, stream2)}
        data = {"param1": "value1", "param2": "value2"}
        body, content_type = MultiPartRenderer().encode_params(data, files)
        self.assertEqual(content_type, self.DEFAULT_CONTENT_TYPE)
        self._assert_params(body)
        self._assert_in_body(body, b"file_upload", b"file.pdf")
        # with no content type in the tuple, none is emitted for the part
        self.assertNotIn(b"application/pdf", body)
        self._assert_in_body(body, b"file_upload2", b"file.png")
        self.assertNotIn(b"Content-Type: image/png", body)

    def test_encode_multipart_data_files_as_3tuple_parameter(self):
        filename, stream = guess_filename_stream("tests/resources/file.pdf")
        files = {"file_upload1": (filename, stream, "application/xxx")}
        data = {"param1": "value1", "param2": "value2"}
        body, content_type = MultiPartRenderer().encode_params(data, files)
        self.assertEqual(content_type, self.DEFAULT_CONTENT_TYPE)
        self._assert_params(body)
        # the third tuple element overrides the guessed content type
        self._assert_in_body(
            body, b"file_upload", b"file.pdf", b"Content-Type: application/xxx")

    def test_encode_multipart_data_files_as_4tuple_parameter(self):
        filename, stream = guess_filename_stream("tests/resources/file.pdf")
        files = {"file_upload1": (filename, stream, "application/xxx", {"x-header": "value", "time": "now"})}
        data = {"param1": "value1", "param2": "value2"}
        body, content_type = MultiPartRenderer().encode_params(data, files)
        self.assertEqual(content_type, self.DEFAULT_CONTENT_TYPE)
        self._assert_params(body)
        self._assert_in_body(
            body, b"file_upload", b"file.pdf", b"Content-Type: application/xxx")
        # the fourth tuple element adds extra per-part headers
        self._assert_in_body(body, b"x-header: value", b"time: now")

    def test_encode_multipart_data_and_no_files(self):
        # NOTE: despite the name, this exercises data=None with files present.
        files = {"file_upload": "tests/resources/file.pdf"}
        body, content_type = MultiPartRenderer().encode_params(None, files)
        self.assertEqual(content_type, self.DEFAULT_CONTENT_TYPE)
        self._assert_pdf_part(body)

    def test_encode_multipart_no_data_and_files(self):
        # NOTE: despite the name, this exercises files=None with data present.
        data = {"param1": "value1", "param2": "value2"}
        body, content_type = MultiPartRenderer().encode_params(data, None)
        self.assertEqual(content_type, self.DEFAULT_CONTENT_TYPE)
        self._assert_params(body)

    def test_encode_multipart_data_files_using_boundary_as_init_parameter(self):
        files = {"file_upload": "tests/resources/file.pdf"}
        data = {"param1": "value1", "param2": "value2"}
        r = MultiPartRenderer("custom_boundary")
        # the init-time boundary must survive repeated encodings
        for _ in range(2):
            body, content_type = r.encode_params(data, files)
            self.assertEqual(content_type, "multipart/form-data; boundary=custom_boundary")
            self._assert_params(body)
            self._assert_pdf_part(body)

    def test_encode_multipart_data_files_using_boundary_as_parameter(self):
        files = {"file_upload": "tests/resources/file.pdf"}
        data = {"param1": "value1", "param2": "value2"}
        r = MultiPartRenderer()
        body, content_type = r.encode_params(data, files, boundary="custom_boundary")
        self.assertEqual(content_type, "multipart/form-data; boundary=custom_boundary")
        self._assert_params(body)
        self._assert_pdf_part(body)
        # a call-time boundary must not stick: the next call reverts to the default
        body, content_type = r.encode_params(data, files)
        self.assertEqual(content_type, self.DEFAULT_CONTENT_TYPE)
        self._assert_params(body)
        self._assert_pdf_part(body)

    def test_encode_multipart_data_files_boolean(self):
        files = {"file_upload": "tests/resources/file.pdf"}
        data = {"param1": "value1", "param2": True}
        r = MultiPartRenderer()
        # True serializes as JSON-style lowercase "true" in both encodings
        body, content_type = r.encode_params(data, files, boundary="custom_boundary")
        self.assertEqual(content_type, "multipart/form-data; boundary=custom_boundary")
        self._assert_in_body(body, b"param1", b"value1", b"param2", b"true")
        self._assert_pdf_part(body)
        body, content_type = r.encode_params(data, files)
        self.assertEqual(content_type, self.DEFAULT_CONTENT_TYPE)
        self._assert_in_body(body, b"param1", b"value1", b"param2", b"true")
        self._assert_pdf_part(body)

    def test_encode_multipart_data_files_none(self):
        files = {"file_upload": "tests/resources/file.pdf"}
        data = {"param1": "value1", "param2": None}
        r = MultiPartRenderer()
        # None serializes as JSON-style "null" in both encodings
        body, content_type = r.encode_params(data, files, boundary="custom_boundary")
        self.assertEqual(content_type, "multipart/form-data; boundary=custom_boundary")
        self._assert_in_body(body, b"param1", b"value1", b"param2", b"null")
        self._assert_pdf_part(body)
        body, content_type = r.encode_params(data, files)
        self.assertEqual(content_type, self.DEFAULT_CONTENT_TYPE)
        self._assert_in_body(body, b"param1", b"value1", b"param2", b"null")
        self._assert_pdf_part(body)

    def test_encode_multipart_data_files_none_csharp(self):
        files = {"file_upload": "tests/resources/file.pdf"}
        data = {"param1": "value1", "param2": None}
        r = MultiPartRenderer()
        # the 'csharp' output style capitalizes None as "Null"
        body, content_type = r.encode_params(
            data, files, boundary="custom_boundary", output_str='csharp')
        self.assertEqual(content_type, "multipart/form-data; boundary=custom_boundary")
        self._assert_in_body(body, b"param1", b"value1", b"param2", b"Null")
        self._assert_pdf_part(body)
| 46.474576
| 146
| 0.651167
| 1,366
| 10,968
| 5.071742
| 0.059297
| 0.193995
| 0.210162
| 0.235566
| 0.933747
| 0.929417
| 0.914838
| 0.914838
| 0.903435
| 0.89694
| 0
| 0.012889
| 0.200675
| 10,968
| 235
| 147
| 46.67234
| 0.777347
| 0
| 0
| 0.838235
| 0
| 0.009804
| 0.289023
| 0.097921
| 0
| 0
| 0
| 0
| 0.637255
| 1
| 0.058824
| false
| 0
| 0.014706
| 0
| 0.078431
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
c28da6608b90826419741746fdcb22e6e69bd3c1
| 27,207
|
py
|
Python
|
nova/tests/unit/test_wsgi.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/test_wsgi.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/test_wsgi.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | 2
|
2017-07-20T17:31:34.000Z
|
2020-07-24T02:42:19.000Z
|
begin_unit
comment|'# Copyright 2011 United States Government as represented by the'
nl|'\n'
comment|'# Administrator of the National Aeronautics and Space Administration.'
nl|'\n'
comment|'# All Rights Reserved.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
string|'"""Unit tests for `nova.wsgi`."""'
newline|'\n'
nl|'\n'
name|'import'
name|'os'
op|'.'
name|'path'
newline|'\n'
name|'import'
name|'socket'
newline|'\n'
name|'import'
name|'tempfile'
newline|'\n'
nl|'\n'
name|'import'
name|'eventlet'
newline|'\n'
name|'import'
name|'eventlet'
op|'.'
name|'wsgi'
newline|'\n'
name|'import'
name|'mock'
newline|'\n'
name|'from'
name|'oslo_config'
name|'import'
name|'cfg'
newline|'\n'
name|'import'
name|'requests'
newline|'\n'
name|'import'
name|'testtools'
newline|'\n'
name|'import'
name|'webob'
newline|'\n'
nl|'\n'
name|'import'
name|'nova'
op|'.'
name|'exception'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'utils'
newline|'\n'
name|'import'
name|'nova'
op|'.'
name|'wsgi'
newline|'\n'
nl|'\n'
DECL|variable|SSL_CERT_DIR
name|'SSL_CERT_DIR'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'normpath'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
nl|'\n'
name|'os'
op|'.'
name|'path'
op|'.'
name|'dirname'
op|'('
name|'os'
op|'.'
name|'path'
op|'.'
name|'abspath'
op|'('
name|'__file__'
op|')'
op|')'
op|','
nl|'\n'
string|"'ssl_cert'"
op|')'
op|')'
newline|'\n'
DECL|variable|CONF
name|'CONF'
op|'='
name|'cfg'
op|'.'
name|'CONF'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|TestLoaderNothingExists
name|'class'
name|'TestLoaderNothingExists'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Loader tests where os.path.exists always returns False."""'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'TestLoaderNothingExists'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'os.path.exists'"
op|','
name|'lambda'
name|'_'
op|':'
name|'False'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_relpath_config_not_found
dedent|''
name|'def'
name|'test_relpath_config_not_found'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'api_paste_config'
op|'='
string|"'api-paste.ini'"
op|','
name|'group'
op|'='
string|"'wsgi'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
nl|'\n'
name|'nova'
op|'.'
name|'exception'
op|'.'
name|'ConfigNotFound'
op|','
nl|'\n'
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Loader'
op|','
nl|'\n'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_asbpath_config_not_found
dedent|''
name|'def'
name|'test_asbpath_config_not_found'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'api_paste_config'
op|'='
string|"'/etc/nova/api-paste.ini'"
op|','
name|'group'
op|'='
string|"'wsgi'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
nl|'\n'
name|'nova'
op|'.'
name|'exception'
op|'.'
name|'ConfigNotFound'
op|','
nl|'\n'
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Loader'
op|','
nl|'\n'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|TestLoaderNormalFilesystem
dedent|''
dedent|''
name|'class'
name|'TestLoaderNormalFilesystem'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Loader tests with normal filesystem (unmodified os.path module)."""'
newline|'\n'
nl|'\n'
name|'_paste_config'
op|'='
string|'"""\n[app:test_app]\nuse = egg:Paste#static\ndocument_root = /tmp\n """'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'TestLoaderNormalFilesystem'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'config'
op|'='
name|'tempfile'
op|'.'
name|'NamedTemporaryFile'
op|'('
name|'mode'
op|'='
string|'"w+t"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'config'
op|'.'
name|'write'
op|'('
name|'self'
op|'.'
name|'_paste_config'
op|'.'
name|'lstrip'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'config'
op|'.'
name|'seek'
op|'('
number|'0'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'config'
op|'.'
name|'flush'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'loader'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Loader'
op|'('
name|'self'
op|'.'
name|'config'
op|'.'
name|'name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_config_found
dedent|''
name|'def'
name|'test_config_found'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'self'
op|'.'
name|'config'
op|'.'
name|'name'
op|','
name|'self'
op|'.'
name|'loader'
op|'.'
name|'config_path'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_app_not_found
dedent|''
name|'def'
name|'test_app_not_found'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
nl|'\n'
name|'nova'
op|'.'
name|'exception'
op|'.'
name|'PasteAppNotFound'
op|','
nl|'\n'
name|'self'
op|'.'
name|'loader'
op|'.'
name|'load_app'
op|','
nl|'\n'
string|'"nonexistent app"'
op|','
nl|'\n'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_app_found
dedent|''
name|'def'
name|'test_app_found'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'url_parser'
op|'='
name|'self'
op|'.'
name|'loader'
op|'.'
name|'load_app'
op|'('
string|'"test_app"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"/tmp"'
op|','
name|'url_parser'
op|'.'
name|'directory'
op|')'
newline|'\n'
nl|'\n'
DECL|member|tearDown
dedent|''
name|'def'
name|'tearDown'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'config'
op|'.'
name|'close'
op|'('
op|')'
newline|'\n'
name|'super'
op|'('
name|'TestLoaderNormalFilesystem'
op|','
name|'self'
op|')'
op|'.'
name|'tearDown'
op|'('
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|TestWSGIServer
dedent|''
dedent|''
name|'class'
name|'TestWSGIServer'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""WSGI server tests."""'
newline|'\n'
nl|'\n'
DECL|member|test_no_app
name|'def'
name|'test_no_app'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'server'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"test_app"'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"test_app"'
op|','
name|'server'
op|'.'
name|'name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_custom_max_header_line
dedent|''
name|'def'
name|'test_custom_max_header_line'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'max_header_line'
op|'='
number|'4096'
op|','
name|'group'
op|'='
string|"'wsgi'"
op|')'
comment|'# Default is 16384'
newline|'\n'
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"test_custom_max_header_line"'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'CONF'
op|'.'
name|'wsgi'
op|'.'
name|'max_header_line'
op|','
nl|'\n'
name|'eventlet'
op|'.'
name|'wsgi'
op|'.'
name|'MAX_HEADER_LINE'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_start_random_port
dedent|''
name|'def'
name|'test_start_random_port'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'server'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"test_random_port"'
op|','
name|'None'
op|','
nl|'\n'
name|'host'
op|'='
string|'"127.0.0.1"'
op|','
name|'port'
op|'='
number|'0'
op|')'
newline|'\n'
name|'server'
op|'.'
name|'start'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
number|'0'
op|','
name|'server'
op|'.'
name|'port'
op|')'
newline|'\n'
name|'server'
op|'.'
name|'stop'
op|'('
op|')'
newline|'\n'
name|'server'
op|'.'
name|'wait'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'testtools'
op|'.'
name|'skipIf'
op|'('
name|'not'
name|'utils'
op|'.'
name|'is_ipv6_supported'
op|'('
op|')'
op|','
string|'"no ipv6 support"'
op|')'
newline|'\n'
DECL|member|test_start_random_port_with_ipv6
name|'def'
name|'test_start_random_port_with_ipv6'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'server'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"test_random_port"'
op|','
name|'None'
op|','
nl|'\n'
name|'host'
op|'='
string|'"::1"'
op|','
name|'port'
op|'='
number|'0'
op|')'
newline|'\n'
name|'server'
op|'.'
name|'start'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"::1"'
op|','
name|'server'
op|'.'
name|'host'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
number|'0'
op|','
name|'server'
op|'.'
name|'port'
op|')'
newline|'\n'
name|'server'
op|'.'
name|'stop'
op|'('
op|')'
newline|'\n'
name|'server'
op|'.'
name|'wait'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'testtools'
op|'.'
name|'skipIf'
op|'('
name|'not'
name|'utils'
op|'.'
name|'is_linux'
op|'('
op|')'
op|','
string|"'SO_REUSEADDR behaves differently '"
nl|'\n'
string|"'on OSX and BSD, see bugs '"
nl|'\n'
string|"'1436895 and 1467145'"
op|')'
newline|'\n'
DECL|member|test_socket_options_for_simple_server
name|'def'
name|'test_socket_options_for_simple_server'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# test normal socket options has set properly'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'tcp_keepidle'
op|'='
number|'500'
op|','
name|'group'
op|'='
string|"'wsgi'"
op|')'
newline|'\n'
name|'server'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"test_socket_options"'
op|','
name|'None'
op|','
nl|'\n'
name|'host'
op|'='
string|'"127.0.0.1"'
op|','
name|'port'
op|'='
number|'0'
op|')'
newline|'\n'
name|'server'
op|'.'
name|'start'
op|'('
op|')'
newline|'\n'
name|'sock'
op|'='
name|'server'
op|'.'
name|'_socket'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'sock'
op|'.'
name|'getsockopt'
op|'('
name|'socket'
op|'.'
name|'SOL_SOCKET'
op|','
nl|'\n'
name|'socket'
op|'.'
name|'SO_REUSEADDR'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'sock'
op|'.'
name|'getsockopt'
op|'('
name|'socket'
op|'.'
name|'SOL_SOCKET'
op|','
nl|'\n'
name|'socket'
op|'.'
name|'SO_KEEPALIVE'
op|')'
op|')'
newline|'\n'
name|'if'
name|'hasattr'
op|'('
name|'socket'
op|','
string|"'TCP_KEEPIDLE'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'CONF'
op|'.'
name|'wsgi'
op|'.'
name|'tcp_keepidle'
op|','
nl|'\n'
name|'sock'
op|'.'
name|'getsockopt'
op|'('
name|'socket'
op|'.'
name|'IPPROTO_TCP'
op|','
nl|'\n'
name|'socket'
op|'.'
name|'TCP_KEEPIDLE'
op|')'
op|')'
newline|'\n'
dedent|''
name|'server'
op|'.'
name|'stop'
op|'('
op|')'
newline|'\n'
name|'server'
op|'.'
name|'wait'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_server_pool_waitall
dedent|''
name|'def'
name|'test_server_pool_waitall'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# test pools waitall method gets called while stopping server'
nl|'\n'
indent|' '
name|'server'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"test_server"'
op|','
name|'None'
op|','
nl|'\n'
name|'host'
op|'='
string|'"127.0.0.1"'
op|')'
newline|'\n'
name|'server'
op|'.'
name|'start'
op|'('
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'server'
op|'.'
name|'_pool'
op|','
nl|'\n'
string|"'waitall'"
op|')'
name|'as'
name|'mock_waitall'
op|':'
newline|'\n'
indent|' '
name|'server'
op|'.'
name|'stop'
op|'('
op|')'
newline|'\n'
name|'server'
op|'.'
name|'wait'
op|'('
op|')'
newline|'\n'
name|'mock_waitall'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_uri_length_limit
dedent|''
dedent|''
name|'def'
name|'test_uri_length_limit'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'server'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"test_uri_length_limit"'
op|','
name|'None'
op|','
nl|'\n'
name|'host'
op|'='
string|'"127.0.0.1"'
op|','
name|'max_url_len'
op|'='
number|'16384'
op|')'
newline|'\n'
name|'server'
op|'.'
name|'start'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'uri'
op|'='
string|'"http://127.0.0.1:%d/%s"'
op|'%'
op|'('
name|'server'
op|'.'
name|'port'
op|','
number|'10000'
op|'*'
string|"'x'"
op|')'
newline|'\n'
name|'resp'
op|'='
name|'requests'
op|'.'
name|'get'
op|'('
name|'uri'
op|','
name|'proxies'
op|'='
op|'{'
string|'"http"'
op|':'
string|'""'
op|'}'
op|')'
newline|'\n'
name|'eventlet'
op|'.'
name|'sleep'
op|'('
number|'0'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
name|'resp'
op|'.'
name|'status_code'
op|','
nl|'\n'
name|'requests'
op|'.'
name|'codes'
op|'.'
name|'REQUEST_URI_TOO_LARGE'
op|')'
newline|'\n'
nl|'\n'
name|'uri'
op|'='
string|'"http://127.0.0.1:%d/%s"'
op|'%'
op|'('
name|'server'
op|'.'
name|'port'
op|','
number|'20000'
op|'*'
string|"'x'"
op|')'
newline|'\n'
name|'resp'
op|'='
name|'requests'
op|'.'
name|'get'
op|'('
name|'uri'
op|','
name|'proxies'
op|'='
op|'{'
string|'"http"'
op|':'
string|'""'
op|'}'
op|')'
newline|'\n'
name|'eventlet'
op|'.'
name|'sleep'
op|'('
number|'0'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'resp'
op|'.'
name|'status_code'
op|','
nl|'\n'
name|'requests'
op|'.'
name|'codes'
op|'.'
name|'REQUEST_URI_TOO_LARGE'
op|')'
newline|'\n'
name|'server'
op|'.'
name|'stop'
op|'('
op|')'
newline|'\n'
name|'server'
op|'.'
name|'wait'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_reset_pool_size_to_default
dedent|''
name|'def'
name|'test_reset_pool_size_to_default'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'server'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"test_resize"'
op|','
name|'None'
op|','
nl|'\n'
name|'host'
op|'='
string|'"127.0.0.1"'
op|','
name|'max_url_len'
op|'='
number|'16384'
op|')'
newline|'\n'
name|'server'
op|'.'
name|'start'
op|'('
op|')'
newline|'\n'
nl|'\n'
comment|'# Stopping the server, which in turn sets pool size to 0'
nl|'\n'
name|'server'
op|'.'
name|'stop'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'server'
op|'.'
name|'_pool'
op|'.'
name|'size'
op|','
number|'0'
op|')'
newline|'\n'
nl|'\n'
comment|'# Resetting pool size to default'
nl|'\n'
name|'server'
op|'.'
name|'reset'
op|'('
op|')'
newline|'\n'
name|'server'
op|'.'
name|'start'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'server'
op|'.'
name|'_pool'
op|'.'
name|'size'
op|','
name|'CONF'
op|'.'
name|'wsgi'
op|'.'
name|'default_pool_size'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_client_socket_timeout
dedent|''
name|'def'
name|'test_client_socket_timeout'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'client_socket_timeout'
op|'='
number|'5'
op|','
name|'group'
op|'='
string|"'wsgi'"
op|')'
newline|'\n'
nl|'\n'
comment|'# mocking eventlet spawn method to check it is called with'
nl|'\n'
comment|"# configured 'client_socket_timeout' value."
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'eventlet'
op|','
nl|'\n'
string|"'spawn'"
op|')'
name|'as'
name|'mock_spawn'
op|':'
newline|'\n'
indent|' '
name|'server'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"test_app"'
op|','
name|'None'
op|','
nl|'\n'
name|'host'
op|'='
string|'"127.0.0.1"'
op|','
name|'port'
op|'='
number|'0'
op|')'
newline|'\n'
name|'server'
op|'.'
name|'start'
op|'('
op|')'
newline|'\n'
name|'_'
op|','
name|'kwargs'
op|'='
name|'mock_spawn'
op|'.'
name|'call_args'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'CONF'
op|'.'
name|'wsgi'
op|'.'
name|'client_socket_timeout'
op|','
nl|'\n'
name|'kwargs'
op|'['
string|"'socket_timeout'"
op|']'
op|')'
newline|'\n'
name|'server'
op|'.'
name|'stop'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_keep_alive
dedent|''
dedent|''
name|'def'
name|'test_keep_alive'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'keep_alive'
op|'='
name|'False'
op|','
name|'group'
op|'='
string|"'wsgi'"
op|')'
newline|'\n'
nl|'\n'
comment|'# mocking eventlet spawn method to check it is called with'
nl|'\n'
comment|"# configured 'keep_alive' value."
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'eventlet'
op|','
nl|'\n'
string|"'spawn'"
op|')'
name|'as'
name|'mock_spawn'
op|':'
newline|'\n'
indent|' '
name|'server'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"test_app"'
op|','
name|'None'
op|','
nl|'\n'
name|'host'
op|'='
string|'"127.0.0.1"'
op|','
name|'port'
op|'='
number|'0'
op|')'
newline|'\n'
name|'server'
op|'.'
name|'start'
op|'('
op|')'
newline|'\n'
name|'_'
op|','
name|'kwargs'
op|'='
name|'mock_spawn'
op|'.'
name|'call_args'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'CONF'
op|'.'
name|'wsgi'
op|'.'
name|'keep_alive'
op|','
nl|'\n'
name|'kwargs'
op|'['
string|"'keepalive'"
op|']'
op|')'
newline|'\n'
name|'server'
op|'.'
name|'stop'
op|'('
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|TestWSGIServerWithSSL
dedent|''
dedent|''
dedent|''
name|'class'
name|'TestWSGIServerWithSSL'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""WSGI server with SSL tests."""'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'TestWSGIServerWithSSL'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'enabled_ssl_apis'
op|'='
op|'['
string|"'fake_ssl'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
nl|'\n'
name|'ssl_cert_file'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'SSL_CERT_DIR'
op|','
string|"'certificate.crt'"
op|')'
op|','
nl|'\n'
name|'ssl_key_file'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'SSL_CERT_DIR'
op|','
string|"'privatekey.key'"
op|')'
op|','
nl|'\n'
name|'group'
op|'='
string|"'wsgi'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_ssl_server
dedent|''
name|'def'
name|'test_ssl_server'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|function|test_app
indent|' '
name|'def'
name|'test_app'
op|'('
name|'env'
op|','
name|'start_response'
op|')'
op|':'
newline|'\n'
indent|' '
name|'start_response'
op|'('
string|"'200 OK'"
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
name|'return'
op|'['
string|"'PONG'"
op|']'
newline|'\n'
nl|'\n'
dedent|''
name|'fake_ssl_server'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"fake_ssl"'
op|','
name|'test_app'
op|','
nl|'\n'
name|'host'
op|'='
string|'"127.0.0.1"'
op|','
name|'port'
op|'='
number|'0'
op|','
nl|'\n'
name|'use_ssl'
op|'='
name|'True'
op|')'
newline|'\n'
name|'fake_ssl_server'
op|'.'
name|'start'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
number|'0'
op|','
name|'fake_ssl_server'
op|'.'
name|'port'
op|')'
newline|'\n'
nl|'\n'
name|'response'
op|'='
name|'requests'
op|'.'
name|'post'
op|'('
nl|'\n'
string|"'https://127.0.0.1:%s/'"
op|'%'
name|'fake_ssl_server'
op|'.'
name|'port'
op|','
nl|'\n'
name|'verify'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'SSL_CERT_DIR'
op|','
string|"'ca.crt'"
op|')'
op|','
name|'data'
op|'='
string|"'PING'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'text'
op|','
string|"'PONG'"
op|')'
newline|'\n'
nl|'\n'
name|'fake_ssl_server'
op|'.'
name|'stop'
op|'('
op|')'
newline|'\n'
name|'fake_ssl_server'
op|'.'
name|'wait'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_two_servers
dedent|''
name|'def'
name|'test_two_servers'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|function|test_app
indent|' '
name|'def'
name|'test_app'
op|'('
name|'env'
op|','
name|'start_response'
op|')'
op|':'
newline|'\n'
indent|' '
name|'start_response'
op|'('
string|"'200 OK'"
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
name|'return'
op|'['
string|"'PONG'"
op|']'
newline|'\n'
nl|'\n'
dedent|''
name|'fake_ssl_server'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"fake_ssl"'
op|','
name|'test_app'
op|','
nl|'\n'
name|'host'
op|'='
string|'"127.0.0.1"'
op|','
name|'port'
op|'='
number|'0'
op|','
name|'use_ssl'
op|'='
name|'True'
op|')'
newline|'\n'
name|'fake_ssl_server'
op|'.'
name|'start'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
number|'0'
op|','
name|'fake_ssl_server'
op|'.'
name|'port'
op|')'
newline|'\n'
nl|'\n'
name|'fake_server'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"fake"'
op|','
name|'test_app'
op|','
nl|'\n'
name|'host'
op|'='
string|'"127.0.0.1"'
op|','
name|'port'
op|'='
number|'0'
op|')'
newline|'\n'
name|'fake_server'
op|'.'
name|'start'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
number|'0'
op|','
name|'fake_server'
op|'.'
name|'port'
op|')'
newline|'\n'
nl|'\n'
name|'response'
op|'='
name|'requests'
op|'.'
name|'post'
op|'('
nl|'\n'
string|"'https://127.0.0.1:%s/'"
op|'%'
name|'fake_ssl_server'
op|'.'
name|'port'
op|','
nl|'\n'
name|'verify'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'SSL_CERT_DIR'
op|','
string|"'ca.crt'"
op|')'
op|','
name|'data'
op|'='
string|"'PING'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'text'
op|','
string|"'PONG'"
op|')'
newline|'\n'
nl|'\n'
name|'response'
op|'='
name|'requests'
op|'.'
name|'post'
op|'('
string|"'http://127.0.0.1:%s/'"
op|'%'
name|'fake_server'
op|'.'
name|'port'
op|','
nl|'\n'
name|'data'
op|'='
string|"'PING'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'response'
op|'.'
name|'text'
op|','
string|"'PONG'"
op|')'
newline|'\n'
nl|'\n'
name|'fake_ssl_server'
op|'.'
name|'stop'
op|'('
op|')'
newline|'\n'
name|'fake_ssl_server'
op|'.'
name|'wait'
op|'('
op|')'
newline|'\n'
name|'fake_server'
op|'.'
name|'stop'
op|'('
op|')'
newline|'\n'
name|'fake_server'
op|'.'
name|'wait'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'testtools'
op|'.'
name|'skipIf'
op|'('
name|'not'
name|'utils'
op|'.'
name|'is_linux'
op|'('
op|')'
op|','
string|"'SO_REUSEADDR behaves differently '"
nl|'\n'
string|"'on OSX and BSD, see bugs '"
nl|'\n'
string|"'1436895 and 1467145'"
op|')'
newline|'\n'
DECL|member|test_socket_options_for_ssl_server
name|'def'
name|'test_socket_options_for_ssl_server'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# test normal socket options has set properly'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'tcp_keepidle'
op|'='
number|'500'
op|','
name|'group'
op|'='
string|"'wsgi'"
op|')'
newline|'\n'
name|'server'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"test_socket_options"'
op|','
name|'None'
op|','
nl|'\n'
name|'host'
op|'='
string|'"127.0.0.1"'
op|','
name|'port'
op|'='
number|'0'
op|','
nl|'\n'
name|'use_ssl'
op|'='
name|'True'
op|')'
newline|'\n'
name|'server'
op|'.'
name|'start'
op|'('
op|')'
newline|'\n'
name|'sock'
op|'='
name|'server'
op|'.'
name|'_socket'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'sock'
op|'.'
name|'getsockopt'
op|'('
name|'socket'
op|'.'
name|'SOL_SOCKET'
op|','
nl|'\n'
name|'socket'
op|'.'
name|'SO_REUSEADDR'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'sock'
op|'.'
name|'getsockopt'
op|'('
name|'socket'
op|'.'
name|'SOL_SOCKET'
op|','
nl|'\n'
name|'socket'
op|'.'
name|'SO_KEEPALIVE'
op|')'
op|')'
newline|'\n'
name|'if'
name|'hasattr'
op|'('
name|'socket'
op|','
string|"'TCP_KEEPIDLE'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'CONF'
op|'.'
name|'wsgi'
op|'.'
name|'tcp_keepidle'
op|','
nl|'\n'
name|'sock'
op|'.'
name|'getsockopt'
op|'('
name|'socket'
op|'.'
name|'IPPROTO_TCP'
op|','
nl|'\n'
name|'socket'
op|'.'
name|'TCP_KEEPIDLE'
op|')'
op|')'
newline|'\n'
dedent|''
name|'server'
op|'.'
name|'stop'
op|'('
op|')'
newline|'\n'
name|'server'
op|'.'
name|'wait'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'testtools'
op|'.'
name|'skipIf'
op|'('
name|'not'
name|'utils'
op|'.'
name|'is_ipv6_supported'
op|'('
op|')'
op|','
string|'"no ipv6 support"'
op|')'
newline|'\n'
DECL|member|test_app_using_ipv6_and_ssl
name|'def'
name|'test_app_using_ipv6_and_ssl'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'greetings'
op|'='
string|"'Hello, World!!!'"
newline|'\n'
nl|'\n'
op|'@'
name|'webob'
op|'.'
name|'dec'
op|'.'
name|'wsgify'
newline|'\n'
DECL|function|hello_world
name|'def'
name|'hello_world'
op|'('
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'greetings'
newline|'\n'
nl|'\n'
dedent|''
name|'server'
op|'='
name|'nova'
op|'.'
name|'wsgi'
op|'.'
name|'Server'
op|'('
string|'"fake_ssl"'
op|','
nl|'\n'
name|'hello_world'
op|','
nl|'\n'
name|'host'
op|'='
string|'"::1"'
op|','
nl|'\n'
name|'port'
op|'='
number|'0'
op|','
nl|'\n'
name|'use_ssl'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
name|'server'
op|'.'
name|'start'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'response'
op|'='
name|'requests'
op|'.'
name|'get'
op|'('
string|"'https://[::1]:%d/'"
op|'%'
name|'server'
op|'.'
name|'port'
op|','
nl|'\n'
name|'verify'
op|'='
name|'os'
op|'.'
name|'path'
op|'.'
name|'join'
op|'('
name|'SSL_CERT_DIR'
op|','
string|"'ca.crt'"
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'greetings'
op|','
name|'response'
op|'.'
name|'text'
op|')'
newline|'\n'
nl|'\n'
name|'server'
op|'.'
name|'stop'
op|'('
op|')'
newline|'\n'
name|'server'
op|'.'
name|'wait'
op|'('
op|')'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
| 12.129737
| 88
| 0.590657
| 4,122
| 27,207
| 3.820718
| 0.074478
| 0.176773
| 0.099689
| 0.070862
| 0.853007
| 0.806146
| 0.774017
| 0.73992
| 0.710966
| 0.691726
| 0
| 0.008603
| 0.102768
| 27,207
| 2,242
| 89
| 12.135147
| 0.636557
| 0
| 0
| 0.942908
| 0
| 0.000446
| 0.351895
| 0.029735
| 0
| 0
| 0
| 0
| 0.013381
| 0
| null | null | 0
| 0.006244
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c2b42e446c4e5028c52852a80265bd09d8bf9a3c
| 4,584
|
py
|
Python
|
tests/unit/dataactvalidator/test_c23_award_financial_2.py
|
chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend
|
3de8cedf69d5a0c9fad8239734bd6291cf583936
|
[
"CC0-1.0"
] | null | null | null |
tests/unit/dataactvalidator/test_c23_award_financial_2.py
|
chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend
|
3de8cedf69d5a0c9fad8239734bd6291cf583936
|
[
"CC0-1.0"
] | null | null | null |
tests/unit/dataactvalidator/test_c23_award_financial_2.py
|
chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend
|
3de8cedf69d5a0c9fad8239734bd6291cf583936
|
[
"CC0-1.0"
] | null | null | null |
from random import choice
from string import ascii_uppercase, ascii_lowercase, digits
from tests.unit.dataactcore.factories.staging import AwardFinancialFactory, AwardProcurementFactory
from tests.unit.dataactvalidator.utils import number_of_errors, query_columns
_FILE = 'c23_award_financial_2'
def test_column_headers(database):
    """The rule's report columns include the row number and both sums."""
    actual_columns = set(query_columns(_FILE, database))
    required = {
        "row_number",
        "transaction_obligated_amou_sum",
        "federal_action_obligation_sum",
    }
    assert required.issubset(actual_columns)
def test_success(database):
    """ No errors when, for each parent_award_id, the summed
    transaction_obligated_amou (award financial) equals the negated summed
    federal_action_obligation (award procurement), and rows with an
    allocation_transfer_agency set are excluded from the comparison even when
    their amounts do not match.
    """
    def _random_parent_award_id():
        # 12-character random id so concurrent test runs don't collide on keys
        return ''.join(choice(ascii_uppercase + ascii_lowercase + digits)
                       for _ in range(12))

    parent_award_id = _random_parent_award_id()
    parent_award_id_two = _random_parent_award_id()
    parent_award_id_three = _random_parent_award_id()

    # First group: 1100 + 11 == -(-1100 + -10 + -1)
    first_parent_award_id_row_one = AwardFinancialFactory(
        transaction_obligated_amou=1100, parent_award_id=parent_award_id,
        allocation_transfer_agency=None)
    first_parent_award_id_row_two = AwardFinancialFactory(
        transaction_obligated_amou=11, parent_award_id=parent_award_id,
        allocation_transfer_agency=None)
    # Second group, different parent_award_id: 9999 == -(-9999)
    second_parent_award_id_row_one = AwardFinancialFactory(
        transaction_obligated_amou=9999, parent_award_id=parent_award_id_two,
        allocation_transfer_agency=None)
    # Third group: amounts do NOT match (8888 vs 9999), but the financial row
    # has an allocation_transfer_agency, so the rule must skip it.
    third_parent_award_id_row_one = AwardFinancialFactory(
        transaction_obligated_amou=8888, parent_award_id=parent_award_id_three,
        allocation_transfer_agency=123)

    first_ap_row = AwardProcurementFactory(
        parent_award_id=parent_award_id, federal_action_obligation=-1100)
    second_ap_row = AwardProcurementFactory(
        parent_award_id=parent_award_id, federal_action_obligation=-10)
    third_ap_row = AwardProcurementFactory(
        parent_award_id=parent_award_id, federal_action_obligation=-1)
    other_parent_award_id_ap_row = AwardProcurementFactory(
        parent_award_id=parent_award_id_two, federal_action_obligation=-9999)
    third_parent_award_id_ap_row = AwardProcurementFactory(
        parent_award_id=parent_award_id_three, federal_action_obligation=-9999)

    errors = number_of_errors(
        _FILE, database,
        models=[first_parent_award_id_row_one, first_parent_award_id_row_two,
                second_parent_award_id_row_one, first_ap_row, second_ap_row,
                third_ap_row, other_parent_award_id_ap_row,
                third_parent_award_id_row_one, third_parent_award_id_ap_row])
    assert errors == 0
def test_failure(database):
    """ Test that a three digit object class with no flag is an error"""
    # Two random 12-character parent_award_id values keep the two award
    # groups from ever colliding.
    award_id = ''.join(choice(ascii_uppercase + ascii_lowercase + digits) for _ in range(12))
    award_id_two = ''.join(choice(ascii_uppercase + ascii_lowercase + digits) for _ in range(12))

    # Financial rows for the first parent_award_id (obligations total 1111).
    af_row_one = AwardFinancialFactory(
        transaction_obligated_amou=1100, parent_award_id=award_id,
        allocation_transfer_agency=None)
    af_row_two = AwardFinancialFactory(
        transaction_obligated_amou=11, parent_award_id=award_id,
        allocation_transfer_agency=None)
    # And add a row that shouldn't be included
    af_other = AwardFinancialFactory(
        transaction_obligated_amou=9999, parent_award_id=award_id_two,
        allocation_transfer_agency=None)

    # Procurement rows: the first group only offsets -1110 of the 1111
    # obligated, and the second group offsets -1111 against 9999, so both
    # groups mismatch and each should raise one error.
    ap_row_one = AwardProcurementFactory(parent_award_id=award_id, federal_action_obligation=-1100)
    ap_row_two = AwardProcurementFactory(parent_award_id=award_id, federal_action_obligation=-10)
    ap_other = AwardProcurementFactory(parent_award_id=award_id_two, federal_action_obligation=-1111)

    errors = number_of_errors(
        _FILE, database,
        models=[af_row_one, af_row_two, af_other, ap_row_one, ap_row_two, ap_other])
    assert errors == 2
| 75.147541
| 208
| 0.744764
| 592
| 4,584
| 5.278716
| 0.16723
| 0.20416
| 0.24128
| 0.10336
| 0.80512
| 0.79424
| 0.77152
| 0.72864
| 0.71232
| 0.69184
| 0
| 0.019299
| 0.197426
| 4,584
| 60
| 209
| 76.4
| 0.830117
| 0.078098
| 0
| 0.454545
| 0
| 0
| 0.021378
| 0.019002
| 0
| 0
| 0
| 0
| 0.068182
| 1
| 0.068182
| false
| 0
| 0.090909
| 0
| 0.159091
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c2d197cb4096f7a5ac060db2c899aeb643a9c5d2
| 4,700
|
py
|
Python
|
tests/test_iterable.py
|
setivolkylany/PythonUtils
|
3e41be7f7cda24c1aa2dbf2f025b0bfb9c2746e6
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_iterable.py
|
setivolkylany/PythonUtils
|
3e41be7f7cda24c1aa2dbf2f025b0bfb9c2746e6
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_iterable.py
|
setivolkylany/PythonUtils
|
3e41be7f7cda24c1aa2dbf2f025b0bfb9c2746e6
|
[
"BSD-3-Clause"
] | null | null | null |
import types
import pytest
from utils import iterable
def test_flatten():
    """iterable.flatten yields every leaf value of a nested sequence lazily."""
    # The result is a generator, not a materialized sequence.
    assert isinstance(iterable.flatten([]), types.GeneratorType)

    # Empty containers flatten to nothing.
    for empty in ([], ()):
        assert tuple(iterable.flatten(empty)) == ()

    # Strings are rejected rather than iterated character by character.
    with pytest.raises(TypeError):
        tuple(iterable.flatten(''))

    # Already-flat sequences come back unchanged (as tuples).
    for flat in ([1, 2, 3, 4], (1, 2, 3, 4)):
        assert tuple(iterable.flatten(flat)) == (1, 2, 3, 4)

    # Nesting of arbitrary depth and mixed container types is collapsed,
    # preserving left-to-right leaf order.
    assert tuple(iterable.flatten(([1, 2], [3, 4]))) == (1, 2, 3, 4)
    assert tuple(iterable.flatten([[], [(), [[[1, ['3', [2.1]], None]], True]]])) == (1, '3', 2.1, None, True)
    assert tuple(iterable.flatten(([(), ()], []))) == ()
    assert tuple(iterable.flatten(((1, 2, ('ext'), ([3.312]), None, []), None))) == (1, 2, 'ext', 3.312, None, None)
def test_split_on_chunks():
    """Exercise iterable.split_on_chunks(sequence, length[, merge_tail]).

    NOTE(review): judging from the expectations below, the helper returns a
    list of tuple-chunks, except that when length >= len(sequence) the input
    sequence is returned unchanged, and the optional third argument appears
    to merge a short trailing chunk into the previous one -- confirm against
    utils.iterable.
    """
    # chunk length greater than the length of the sequence: input returned as-is
    assert iterable.split_on_chunks([12, 32.12, 'sa'], 4, True) == [12, 32.12, 'sa']
    # chunk length equal to the length of the sequence: input returned as-is
    assert iterable.split_on_chunks((True, None, 'sa'), 3, True) == (True, None, 'sa')
    # no support for a string
    with pytest.raises(TypeError):
        iterable.split_on_chunks('text', 20, False)
    # a chunk length of 0 is rejected
    with pytest.raises(ValueError):
        iterable.split_on_chunks([1, 2.1], 0, False)
    # a negative chunk length is rejected
    with pytest.raises(ValueError):
        iterable.split_on_chunks([[], None], -10, True)
    # empty input stays empty
    assert iterable.split_on_chunks((), 1) == ()
    # small mixed-type sequence: every chunk length from 1 to len(obj)
    obj = (1, (2.1, [True, None]), [21, 'str'], '', False, 0)
    assert iterable.split_on_chunks(obj, 1) == \
        [(1, ), ((2.1, [True, None]), ), ([21, 'str'], ), ('', ), (False, ), (0, )]
    assert iterable.split_on_chunks(obj, 2) == [(1, (2.1, [True, None])), ([21, 'str'], ''), (False, 0)]
    assert iterable.split_on_chunks(obj, 3) == [(1, (2.1, [True, None]), [21, 'str']), ('', False, 0)]
    assert iterable.split_on_chunks(obj, 4) == [(1, (2.1, [True, None]), [21, 'str'], ''), (False, 0)]
    # with the merge flag the short tail is folded into the previous chunk
    assert iterable.split_on_chunks(obj, 4, True) == [(1, (2.1, [True, None]), [21, 'str'], '', False, 0)]
    assert iterable.split_on_chunks(obj, 5) == [(1, (2.1, [True, None]), [21, 'str'], '', False), (0, )]
    assert iterable.split_on_chunks(obj, 5, True) == [(1, (2.1, [True, None]), [21, 'str'], '', False, 0, )]
    assert iterable.split_on_chunks(obj, 6) == (1, (2.1, [True, None]), [21, 'str'], '', False, 0)
    # larger sequence: exhaustive sweep of chunk lengths, with and without merging
    obj = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, True, None, False, '', []]
    assert iterable.split_on_chunks(obj, 1) == [
        (0, ),
        (1, ),
        (2, ),
        (3, ),
        (4, ),
        (5, ),
        (6, ),
        (7, ),
        (8, ),
        (9, ),
        (True, ),
        (None, ),
        (False, ),
        ('', ),
        ([], ),
    ]
    assert iterable.split_on_chunks(obj, 2) == [
        (0, 1),
        (2, 3),
        (4, 5),
        (6, 7),
        (8, 9),
        (True, None),
        (False, ''),
        ([], )
    ]
    assert iterable.split_on_chunks(obj, 2, True) == [
        (0, 1),
        (2, 3),
        (4, 5),
        (6, 7),
        (8, 9),
        (True, None),
        (False, '', []),
    ]
    assert iterable.split_on_chunks(obj, 3) == [
        (0, 1, 2),
        (3, 4, 5),
        (6, 7, 8),
        (9, True, None),
        (False, '', []),
    ]
    assert iterable.split_on_chunks(obj, 4) == [
        (0, 1, 2, 3),
        (4, 5, 6, 7),
        (8, 9, True, None),
        (False, '', []),
    ]
    assert iterable.split_on_chunks(obj, 4, True) == [
        (0, 1, 2, 3),
        (4, 5, 6, 7),
        (8, 9, True, None, False, '', []),
    ]
    assert iterable.split_on_chunks(obj, 5) == [
        (0, 1, 2, 3, 4),
        (5, 6, 7, 8, 9),
        (True, None, False, '', []),
    ]
    assert iterable.split_on_chunks(obj, 6) == [
        (0, 1, 2, 3, 4, 5),
        (6, 7, 8, 9, True, None),
        (False, '', []),
    ]
    assert iterable.split_on_chunks(obj, 6, True) == [
        (0, 1, 2, 3, 4, 5),
        (6, 7, 8, 9, True, None, False, '', []),
    ]
    assert iterable.split_on_chunks(obj, 7) == [
        (0, 1, 2, 3, 4, 5, 6),
        (7, 8, 9, True, None, False, ''),
        ([],),
    ]
    assert iterable.split_on_chunks(obj, 7, True) == [
        (0, 1, 2, 3, 4, 5, 6),
        (7, 8, 9, True, None, False, '', []),
    ]
    assert iterable.split_on_chunks(obj, 8) == [
        (0, 1, 2, 3, 4, 5, 6, 7),
        (8, 9, True, None, False, '', []),
    ]
    assert iterable.split_on_chunks(obj, 9) == [
        (0, 1, 2, 3, 4, 5, 6, 7, 8),
        (9, True, None, False, '', []),
    ]
    assert iterable.split_on_chunks(obj, 9, True) == [(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, True, None, False, '', [])]
| 31.756757
| 116
| 0.464894
| 649
| 4,700
| 3.274268
| 0.086287
| 0.031059
| 0.177412
| 0.276706
| 0.835294
| 0.821647
| 0.810824
| 0.792471
| 0.765176
| 0.636235
| 0
| 0.08907
| 0.295319
| 4,700
| 147
| 117
| 31.972789
| 0.552536
| 0.028298
| 0
| 0.262295
| 0
| 0
| 0.010307
| 0
| 0
| 0
| 0
| 0
| 0.278689
| 1
| 0.016393
| false
| 0
| 0.02459
| 0
| 0.040984
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6c4264ada64fdd2ffe50defb07b48c1590ab629c
| 4,172
|
py
|
Python
|
badboids/test/test_simulator_model.py
|
RiannaK/Coursework2
|
471589593fd09c61fae39cb5975cc88fee36971c
|
[
"MIT"
] | null | null | null |
badboids/test/test_simulator_model.py
|
RiannaK/Coursework2
|
471589593fd09c61fae39cb5975cc88fee36971c
|
[
"MIT"
] | 2
|
2017-01-02T11:11:31.000Z
|
2017-01-02T22:09:15.000Z
|
badboids/test/test_simulator_model.py
|
RiannaK/Coursework2
|
471589593fd09c61fae39cb5975cc88fee36971c
|
[
"MIT"
] | null | null | null |
import numpy as np
import yaml
import os
from numpy.testing import assert_array_almost_equal as array_assert
from badboids.boids import Boids, SimulatorModel, SimulationParameters
def test_simulator_model_fly_towards_middle():
    """Tests Simulator 'fly towards middle' method.

    Loads position/velocity fixtures from YAML, runs fly_towards_middle once,
    and checks the resulting state against the expected arrays.
    """
    with open(os.path.join(os.path.dirname(__file__), 'fixtures', 'test_simulator_fixtures.yaml')) as fixtures_file:
        # safe_load: the fixture file is plain data, and yaml.load without an
        # explicit Loader is deprecated (and unsafe) since PyYAML 5.1.
        fixtures = yaml.safe_load(fixtures_file)['test_simulator_fly_towards_middle']

    for fixture in fixtures:
        # Arrange
        positions = np.array(fixture.pop('positions'))
        velocities = np.array(fixture.pop('velocities'))
        expected_positions = np.array(fixture.pop('expected_positions'))
        expected_velocities = np.array(fixture.pop('expected_velocities'))
        boids = Boids(positions, velocities)
        simulation_parameters = SimulationParameters.get_defaults()
        sut = SimulatorModel(boids, simulation_parameters)

        # Act
        sut.fly_towards_middle()

        # Assert: state matches the fixture to 6 decimal places.
        array_assert(sut.boids.positions, expected_positions, 6)
        array_assert(sut.boids.velocities, expected_velocities, 6)
def test_simulator_model_fly_away_from_nearby_boids():
    """Tests Simulator 'fly_away_from_nearby_boids' method.

    Loads position/velocity fixtures from YAML, runs the rule once, and
    checks the resulting state against the expected arrays.
    """
    with open(os.path.join(os.path.dirname(__file__), 'fixtures', 'test_simulator_fixtures.yaml')) as fixtures_file:
        # safe_load: the fixture file is plain data, and yaml.load without an
        # explicit Loader is deprecated (and unsafe) since PyYAML 5.1.
        fixtures = yaml.safe_load(fixtures_file)['test_simulator_fly_away_from_nearby_boids']

    for fixture in fixtures:
        # Arrange
        positions = np.array(fixture.pop('positions'))
        velocities = np.array(fixture.pop('velocities'))
        expected_positions = np.array(fixture.pop('expected_positions'))
        expected_velocities = np.array(fixture.pop('expected_velocities'))
        boids = Boids(positions, velocities)
        simulation_parameters = SimulationParameters.get_defaults()
        sut = SimulatorModel(boids, simulation_parameters)

        # Act
        sut.fly_away_from_nearby_boids()

        # Assert: state matches the fixture to 6 decimal places.
        array_assert(sut.boids.positions, expected_positions, 6)
        array_assert(sut.boids.velocities, expected_velocities, 6)
def test_simulator_model_match_speed_of_nearby_boids():
    """Tests Simulator 'match_speed_of_nearby_boids' method.

    Loads position/velocity fixtures from YAML, runs the rule once, and
    checks the resulting state against the expected arrays.
    """
    with open(os.path.join(os.path.dirname(__file__), 'fixtures', 'test_simulator_fixtures.yaml')) as fixtures_file:
        # safe_load: the fixture file is plain data, and yaml.load without an
        # explicit Loader is deprecated (and unsafe) since PyYAML 5.1.
        fixtures = yaml.safe_load(fixtures_file)['test_simulator_match_speed_of_nearby_boids']

    for fixture in fixtures:
        # Arrange
        positions = np.array(fixture.pop('positions'))
        velocities = np.array(fixture.pop('velocities'))
        expected_positions = np.array(fixture.pop('expected_positions'))
        expected_velocities = np.array(fixture.pop('expected_velocities'))
        boids = Boids(positions, velocities)
        simulation_parameters = SimulationParameters.get_defaults()
        sut = SimulatorModel(boids, simulation_parameters)

        # Act
        sut.match_speed_of_nearby_boids()

        # Assert: state matches the fixture to 6 decimal places.
        array_assert(sut.boids.positions, expected_positions, 6)
        array_assert(sut.boids.velocities, expected_velocities, 6)
def test_simulator_model_update_velocities():
    """Tests Simulator 'update velocities' method.

    Loads position/velocity fixtures from YAML, runs one update step, and
    checks the resulting state against the expected arrays.
    """
    with open(os.path.join(os.path.dirname(__file__), 'fixtures', 'test_simulator_fixtures.yaml')) as fixtures_file:
        # safe_load: the fixture file is plain data, and yaml.load without an
        # explicit Loader is deprecated (and unsafe) since PyYAML 5.1.
        fixtures = yaml.safe_load(fixtures_file)['test_simulator_update_velocities']

    for fixture in fixtures:
        # Arrange
        positions = np.array(fixture.pop('positions'))
        velocities = np.array(fixture.pop('velocities'))
        expected_positions = np.array(fixture.pop('expected_positions'))
        expected_velocities = np.array(fixture.pop('expected_velocities'))
        boids = Boids(positions, velocities)
        simulation_parameters = SimulationParameters.get_defaults()
        sut = SimulatorModel(boids, simulation_parameters)

        # Act
        # NOTE(review): the test name and fixture key say "update_velocities"
        # but this calls update_positions() -- confirm which method the
        # fixtures were generated for before renaming either side.
        sut.update_positions()

        # Assert: state matches the fixture to 6 decimal places.
        array_assert(sut.boids.positions, expected_positions, 6)
        array_assert(sut.boids.velocities, expected_velocities, 6)
| 37.585586
| 116
| 0.714286
| 474
| 4,172
| 5.989451
| 0.113924
| 0.039451
| 0.078901
| 0.095808
| 0.891511
| 0.860162
| 0.838676
| 0.838676
| 0.838676
| 0.838676
| 0
| 0.002353
| 0.185043
| 4,172
| 110
| 117
| 37.927273
| 0.832647
| 0.064238
| 0
| 0.721311
| 0
| 0
| 0.133299
| 0.067166
| 0
| 0
| 0
| 0
| 0.147541
| 1
| 0.065574
| false
| 0
| 0.081967
| 0
| 0.147541
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
66d7f9c6bc05b55e692f96edd8af9bf8673c31d0
| 23,455
|
py
|
Python
|
services/traction/acapy_client/api/did_exchange_api.py
|
Open-Earth-Foundation/traction
|
908b555a7f408a88541b7692d3730e37a297c919
|
[
"Apache-2.0"
] | 12
|
2022-01-29T20:30:03.000Z
|
2022-03-29T11:46:14.000Z
|
services/traction/acapy_client/api/did_exchange_api.py
|
Open-Earth-Foundation/traction
|
908b555a7f408a88541b7692d3730e37a297c919
|
[
"Apache-2.0"
] | 38
|
2021-11-22T17:52:50.000Z
|
2022-03-31T17:52:00.000Z
|
services/traction/acapy_client/api/did_exchange_api.py
|
Open-Earth-Foundation/traction
|
908b555a7f408a88541b7692d3730e37a297c919
|
[
"Apache-2.0"
] | 9
|
2021-11-22T18:05:48.000Z
|
2022-03-29T11:25:08.000Z
|
"""
Aries Cloud Agent
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v0.7.2
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from acapy_client.api_client import ApiClient, Endpoint as _Endpoint
from acapy_client.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types,
)
from acapy_client.model.conn_record import ConnRecord
from acapy_client.model.didx_request import DIDXRequest
class DidExchangeApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    def __init__(self, api_client=None):
        # Fall back to a default ApiClient when the caller does not supply one.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

        # POST /didexchange/{conn_id}/accept-invitation
        self.didexchange_conn_id_accept_invitation_post_endpoint = _Endpoint(
            settings={
                "response_type": (ConnRecord,),
                "auth": ["AuthorizationHeader"],
                "endpoint_path": "/didexchange/{conn_id}/accept-invitation",
                "operation_id": "didexchange_conn_id_accept_invitation_post",
                "http_method": "POST",
                "servers": None,
            },
            params_map={
                "all": [
                    "conn_id",
                    "my_endpoint",
                    "my_label",
                ],
                "required": [
                    "conn_id",
                ],
                "nullable": [],
                "enum": [],
                "validation": [
                    "my_endpoint",
                ],
            },
            root_map={
                "validations": {
                    # my_endpoint must look like a URL (scheme://host[:port][/path]).
                    ("my_endpoint",): {
                        "regex": {
                            "pattern": r"^[A-Za-z0-9\.\-\+]+:\/\/([A-Za-z0-9][.A-Za-z0-9-_]+[A-Za-z0-9])+(:[1-9][0-9]*)?(\/[^?&#]+)?$",  # noqa: E501
                        },
                    },
                },
                "allowed_values": {},
                "openapi_types": {
                    "conn_id": (str,),
                    "my_endpoint": (str,),
                    "my_label": (str,),
                },
                "attribute_map": {
                    "conn_id": "conn_id",
                    "my_endpoint": "my_endpoint",
                    "my_label": "my_label",
                },
                "location_map": {
                    "conn_id": "path",
                    "my_endpoint": "query",
                    "my_label": "query",
                },
                "collection_format_map": {},
            },
            headers_map={
                "accept": ["application/json"],
                "content_type": [],
            },
            api_client=api_client,
        )
        # POST /didexchange/{conn_id}/accept-request
        self.didexchange_conn_id_accept_request_post_endpoint = _Endpoint(
            settings={
                "response_type": (ConnRecord,),
                "auth": ["AuthorizationHeader"],
                "endpoint_path": "/didexchange/{conn_id}/accept-request",
                "operation_id": "didexchange_conn_id_accept_request_post",
                "http_method": "POST",
                "servers": None,
            },
            params_map={
                "all": [
                    "conn_id",
                    "mediation_id",
                    "my_endpoint",
                ],
                "required": [
                    "conn_id",
                ],
                "nullable": [],
                "enum": [],
                "validation": [
                    "my_endpoint",
                ],
            },
            root_map={
                "validations": {
                    # my_endpoint must look like a URL (scheme://host[:port][/path]).
                    ("my_endpoint",): {
                        "regex": {
                            "pattern": r"^[A-Za-z0-9\.\-\+]+:\/\/([A-Za-z0-9][.A-Za-z0-9-_]+[A-Za-z0-9])+(:[1-9][0-9]*)?(\/[^?&#]+)?$",  # noqa: E501
                        },
                    },
                },
                "allowed_values": {},
                "openapi_types": {
                    "conn_id": (str,),
                    "mediation_id": (str,),
                    "my_endpoint": (str,),
                },
                "attribute_map": {
                    "conn_id": "conn_id",
                    "mediation_id": "mediation_id",
                    "my_endpoint": "my_endpoint",
                },
                "location_map": {
                    "conn_id": "path",
                    "mediation_id": "query",
                    "my_endpoint": "query",
                },
                "collection_format_map": {},
            },
            headers_map={
                "accept": ["application/json"],
                "content_type": [],
            },
            api_client=api_client,
        )
        # POST /didexchange/create-request
        self.didexchange_create_request_post_endpoint = _Endpoint(
            settings={
                "response_type": (ConnRecord,),
                "auth": ["AuthorizationHeader"],
                "endpoint_path": "/didexchange/create-request",
                "operation_id": "didexchange_create_request_post",
                "http_method": "POST",
                "servers": None,
            },
            params_map={
                "all": [
                    "their_public_did",
                    "alias",
                    "mediation_id",
                    "my_endpoint",
                    "my_label",
                    "use_public_did",
                ],
                "required": [
                    "their_public_did",
                ],
                "nullable": [],
                "enum": [],
                "validation": [
                    "their_public_did",
                    "my_endpoint",
                ],
            },
            root_map={
                "validations": {
                    # their_public_did must be a bare Sov DID or a fully
                    # qualified did:<method>:<id> URI.
                    ("their_public_did",): {
                        "regex": {
                            "pattern": r"^(did:sov:)?[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}$|^did:([a-zA-Z0-9_]+):([a-zA-Z0-9_.%-]+(:[a-zA-Z0-9_.%-]+)*)((;[a-zA-Z0-9_.:%-]+=[a-zA-Z0-9_.:%-]*)*)(\/[^#?]*)?([?][^#]*)?(\#.*)?$$",  # noqa: E501
                        },
                    },
                    # my_endpoint must look like a URL (scheme://host[:port][/path]).
                    ("my_endpoint",): {
                        "regex": {
                            "pattern": r"^[A-Za-z0-9\.\-\+]+:\/\/([A-Za-z0-9][.A-Za-z0-9-_]+[A-Za-z0-9])+(:[1-9][0-9]*)?(\/[^?&#]+)?$",  # noqa: E501
                        },
                    },
                },
                "allowed_values": {},
                "openapi_types": {
                    "their_public_did": (str,),
                    "alias": (str,),
                    "mediation_id": (str,),
                    "my_endpoint": (str,),
                    "my_label": (str,),
                    "use_public_did": (bool,),
                },
                "attribute_map": {
                    "their_public_did": "their_public_did",
                    "alias": "alias",
                    "mediation_id": "mediation_id",
                    "my_endpoint": "my_endpoint",
                    "my_label": "my_label",
                    "use_public_did": "use_public_did",
                },
                "location_map": {
                    "their_public_did": "query",
                    "alias": "query",
                    "mediation_id": "query",
                    "my_endpoint": "query",
                    "my_label": "query",
                    "use_public_did": "query",
                },
                "collection_format_map": {},
            },
            headers_map={
                "accept": ["application/json"],
                "content_type": [],
            },
            api_client=api_client,
        )
        # POST /didexchange/receive-request
        self.didexchange_receive_request_post_endpoint = _Endpoint(
            settings={
                "response_type": (ConnRecord,),
                "auth": ["AuthorizationHeader"],
                "endpoint_path": "/didexchange/receive-request",
                "operation_id": "didexchange_receive_request_post",
                "http_method": "POST",
                "servers": None,
            },
            params_map={
                "all": [
                    "alias",
                    "auto_accept",
                    "mediation_id",
                    "my_endpoint",
                    "body",
                ],
                "required": [],
                "nullable": [],
                "enum": [],
                "validation": [
                    "my_endpoint",
                ],
            },
            root_map={
                "validations": {
                    # my_endpoint must look like a URL (scheme://host[:port][/path]).
                    ("my_endpoint",): {
                        "regex": {
                            "pattern": r"^[A-Za-z0-9\.\-\+]+:\/\/([A-Za-z0-9][.A-Za-z0-9-_]+[A-Za-z0-9])+(:[1-9][0-9]*)?(\/[^?&#]+)?$",  # noqa: E501
                        },
                    },
                },
                "allowed_values": {},
                "openapi_types": {
                    "alias": (str,),
                    "auto_accept": (bool,),
                    "mediation_id": (str,),
                    "my_endpoint": (str,),
                    "body": (DIDXRequest,),
                },
                # "body" is sent as the request body (see location_map), so it
                # intentionally has no entry here.
                "attribute_map": {
                    "alias": "alias",
                    "auto_accept": "auto_accept",
                    "mediation_id": "mediation_id",
                    "my_endpoint": "my_endpoint",
                },
                "location_map": {
                    "alias": "query",
                    "auto_accept": "query",
                    "mediation_id": "query",
                    "my_endpoint": "query",
                    "body": "body",
                },
                "collection_format_map": {},
            },
            headers_map={
                "accept": ["application/json"],
                "content_type": [],
            },
            api_client=api_client,
        )

    def didexchange_conn_id_accept_invitation_post(self, conn_id, **kwargs):
        """Accept a stored connection invitation  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.didexchange_conn_id_accept_invitation_post(conn_id, async_req=True)
        >>> result = thread.get()

        Args:
            conn_id (str): Connection identifier

        Keyword Args:
            my_endpoint (str): My URL endpoint. [optional]
            my_label (str): Label for connection request. [optional]
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _content_type (str/None): force body content-type.
                Default is None and content-type will be predicted by allowed
                content-types and body.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously

        Returns:
            ConnRecord
                If the method is called asynchronously, returns the request
                thread.
        """
        # Fill in framework defaults for any request option the caller did
        # not supply, then delegate to the pre-built endpoint object.
        kwargs["async_req"] = kwargs.get("async_req", False)
        kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True)
        kwargs["_preload_content"] = kwargs.get("_preload_content", True)
        kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
        kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
        kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
        kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
        kwargs["_content_type"] = kwargs.get("_content_type")
        kwargs["_host_index"] = kwargs.get("_host_index")
        kwargs["conn_id"] = conn_id
        return self.didexchange_conn_id_accept_invitation_post_endpoint.call_with_http_info(
            **kwargs
        )

    def didexchange_conn_id_accept_request_post(self, conn_id, **kwargs):
        """Accept a stored connection request  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.didexchange_conn_id_accept_request_post(conn_id, async_req=True)
        >>> result = thread.get()

        Args:
            conn_id (str): Connection identifier

        Keyword Args:
            mediation_id (str): Identifier for active mediation record to be used. [optional]
            my_endpoint (str): My URL endpoint. [optional]
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _content_type (str/None): force body content-type.
                Default is None and content-type will be predicted by allowed
                content-types and body.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously

        Returns:
            ConnRecord
                If the method is called asynchronously, returns the request
                thread.
        """
        # Fill in framework defaults for any request option the caller did
        # not supply, then delegate to the pre-built endpoint object.
        kwargs["async_req"] = kwargs.get("async_req", False)
        kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True)
        kwargs["_preload_content"] = kwargs.get("_preload_content", True)
        kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
        kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
        kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
        kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
        kwargs["_content_type"] = kwargs.get("_content_type")
        kwargs["_host_index"] = kwargs.get("_host_index")
        kwargs["conn_id"] = conn_id
        return (
            self.didexchange_conn_id_accept_request_post_endpoint.call_with_http_info(
                **kwargs
            )
        )

    def didexchange_create_request_post(self, their_public_did, **kwargs):
        """Create and send a request against public DID's implicit invitation  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.didexchange_create_request_post(their_public_did, async_req=True)
        >>> result = thread.get()

        Args:
            their_public_did (str): Qualified public DID to which to request connection

        Keyword Args:
            alias (str): Alias to apply to this connection [optional]
            mediation_id (str): Identifier for active mediation record to be used. [optional]
            my_endpoint (str): My URL endpoint. [optional]
            my_label (str): Label for connection request. [optional]
            use_public_did (bool): Use public DID for this connection. [optional]
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _content_type (str/None): force body content-type.
                Default is None and content-type will be predicted by allowed
                content-types and body.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously

        Returns:
            ConnRecord
                If the method is called asynchronously, returns the request
                thread.
        """
        # Fill in framework defaults for any request option the caller did
        # not supply, then delegate to the pre-built endpoint object.
        kwargs["async_req"] = kwargs.get("async_req", False)
        kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True)
        kwargs["_preload_content"] = kwargs.get("_preload_content", True)
        kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
        kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
        kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
        kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
        kwargs["_content_type"] = kwargs.get("_content_type")
        kwargs["_host_index"] = kwargs.get("_host_index")
        kwargs["their_public_did"] = their_public_did
        return self.didexchange_create_request_post_endpoint.call_with_http_info(
            **kwargs
        )

    def didexchange_receive_request_post(self, **kwargs):
        """Receive request against public DID's implicit invitation  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.didexchange_receive_request_post(async_req=True)
        >>> result = thread.get()

        Keyword Args:
            alias (str): Alias for connection. [optional]
            auto_accept (bool): Auto-accept connection (defaults to configuration). [optional]
            mediation_id (str): Identifier for active mediation record to be used. [optional]
            my_endpoint (str): My URL endpoint. [optional]
            body (DIDXRequest): [optional]
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.
            _preload_content (bool): if False, the urllib3.HTTPResponse object
                will be returned without reading/decoding response data.
                Default is True.
            _request_timeout (int/float/tuple): timeout setting for this request. If
                one number provided, it will be total request timeout. It can also
                be a pair (tuple) of (connection, read) timeouts.
                Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _content_type (str/None): force body content-type.
                Default is None and content-type will be predicted by allowed
                content-types and body.
            _host_index (int/None): specifies the index of the server
                that we want to use.
                Default is read from the configuration.
            async_req (bool): execute request asynchronously

        Returns:
            ConnRecord
                If the method is called asynchronously, returns the request
                thread.
        """
        # Fill in framework defaults for any request option the caller did
        # not supply, then delegate to the pre-built endpoint object.
        kwargs["async_req"] = kwargs.get("async_req", False)
        kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True)
        kwargs["_preload_content"] = kwargs.get("_preload_content", True)
        kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
        kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
        kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
        kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
        kwargs["_content_type"] = kwargs.get("_content_type")
        kwargs["_host_index"] = kwargs.get("_host_index")
        return self.didexchange_receive_request_post_endpoint.call_with_http_info(
            **kwargs
        )
| 43.841121
| 269
| 0.52249
| 2,353
| 23,455
| 4.951126
| 0.098173
| 0.027811
| 0.009013
| 0.010815
| 0.858627
| 0.84412
| 0.812103
| 0.795966
| 0.784635
| 0.76206
| 0
| 0.007951
| 0.372586
| 23,455
| 534
| 270
| 43.923221
| 0.783705
| 0.373652
| 0
| 0.662539
| 1
| 0.01548
| 0.306253
| 0.096498
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01548
| false
| 0
| 0.018576
| 0
| 0.049536
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dd7042b12962e69b59e1328c3e1a9f5c4fc5a476
| 259
|
py
|
Python
|
nmigen_boards/supercon19badge.py
|
lethalbit/nmigen-boards
|
aaf18252e457ff95257137da2a629820c0ff2bfa
|
[
"BSD-2-Clause"
] | 11
|
2021-12-10T12:23:29.000Z
|
2022-03-13T08:40:20.000Z
|
nmigen_boards/supercon19badge.py
|
lethalbit/nmigen-boards
|
aaf18252e457ff95257137da2a629820c0ff2bfa
|
[
"BSD-2-Clause"
] | 12
|
2021-12-11T18:51:29.000Z
|
2022-03-12T05:08:52.000Z
|
nmigen_boards/supercon19badge.py
|
lethalbit/nmigen-boards
|
aaf18252e457ff95257137da2a629820c0ff2bfa
|
[
"BSD-2-Clause"
] | 7
|
2021-12-12T07:20:21.000Z
|
2022-03-06T06:20:55.000Z
|
# Backwards-compatibility shim: nmigen was renamed to amaranth, so this module
# re-exports everything from the amaranth_boards equivalent and warns.
from amaranth_boards.supercon19badge import *
from amaranth_boards.supercon19badge import __all__
import warnings
# stacklevel=2 attributes the warning to the module that imported this shim.
warnings.warn("instead of nmigen_boards.supercon19badge, use amaranth_boards.supercon19badge",
DeprecationWarning, stacklevel=2)
| 32.375
| 94
| 0.818533
| 27
| 259
| 7.555556
| 0.555556
| 0.411765
| 0.426471
| 0.323529
| 0.382353
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039823
| 0.127413
| 259
| 7
| 95
| 37
| 0.862832
| 0
| 0
| 0
| 0
| 0
| 0.297297
| 0.235521
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
dd723162afea6c99b929d2337d5a57e084e8aba8
| 1,718
|
py
|
Python
|
gsoc2017-nausheen/HOLE/makeid_file.py
|
CodeMonkeyGjp/embeddings
|
ee4f206f5ede822780644d2b254eedd72b7126d6
|
[
"Apache-2.0"
] | 73
|
2017-06-27T18:35:34.000Z
|
2021-12-02T02:42:54.000Z
|
gsoc2017-nausheen/HOLE/makeid_file.py
|
CodeMonkeyGjp/embeddings
|
ee4f206f5ede822780644d2b254eedd72b7126d6
|
[
"Apache-2.0"
] | 8
|
2018-02-25T04:10:35.000Z
|
2022-03-11T23:21:49.000Z
|
gsoc2017-nausheen/HOLE/makeid_file.py
|
CodeMonkeyGjp/embeddings
|
ee4f206f5ede822780644d2b254eedd72b7126d6
|
[
"Apache-2.0"
] | 18
|
2017-05-19T16:37:56.000Z
|
2021-12-15T10:31:18.000Z
|
# -*- coding: utf-8 -*-
# Generates unique id for every entity and relation (predicate)
# Author: Nausheen Fatma, 25th June 2017
#
# Reads the train/test/valid triple files (one "subject predicate object"
# per line), collects every distinct entity and relation, and writes
# entity2id.txt / relation2id.txt mapping each name to a sequential id.
# Fixes over the original: files are closed via `with`, the bare
# try/except membership test is replaced with dict.setdefault, blank or
# malformed lines no longer crash the script, and the final print is
# valid in both Python 2 and Python 3.


def _collect(path, entity_dict, relation_dict):
    """Register the entities (columns 0 and 2) and relation (column 1) of
    every triple in `path` into the given dicts (value is always 0; the
    dicts are only used as ordered sets)."""
    with open(path, "r") as f:
        for line in f:
            tokens = line.strip().split()
            if len(tokens) < 3:
                continue  # skip blank/malformed lines instead of crashing
            entity_dict.setdefault(tokens[0], 0)
            entity_dict.setdefault(tokens[2], 0)
            relation_dict.setdefault(tokens[1], 0)


def _write_ids(path, names):
    """Write one "name<TAB>id" line per name, ids assigned sequentially."""
    with open(path, "w") as fw:
        for id_count, key in enumerate(names):
            fw.write(key + "\t" + str(id_count) + "\n")


id_dict = {}
relation_dict = {}
# Same three splits, same order, as the original hand-unrolled scans.
for split in ("train", "test", "valid"):
    _collect("standard_data/%s.txt" % split, id_dict, relation_dict)

_write_ids("standard_data/entity2id.txt", id_dict)
_write_ids("standard_data/relation2id.txt", relation_dict)
print("Done")
| 19.303371
| 64
| 0.594296
| 264
| 1,718
| 3.655303
| 0.19697
| 0.217617
| 0.26114
| 0.198964
| 0.809326
| 0.784456
| 0.784456
| 0.73057
| 0.73057
| 0.73057
| 0
| 0.031422
| 0.259022
| 1,718
| 88
| 65
| 19.522727
| 0.72663
| 0.069849
| 0
| 0.84127
| 1
| 0
| 0.088568
| 0.077889
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.015873
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
06c34614fd6047f059e3d2462a7d41c3cfff3322
| 3,334
|
py
|
Python
|
src/animator/fadein.py
|
Z3t5upro/AC_Management
|
d9c64177312d8c3eebc5e05124fd372bfe42d000
|
[
"MIT"
] | 22
|
2020-01-05T19:30:13.000Z
|
2022-03-29T06:14:04.000Z
|
src/animator/fadein.py
|
Z3t5upro/AC_Management
|
d9c64177312d8c3eebc5e05124fd372bfe42d000
|
[
"MIT"
] | 7
|
2019-12-07T09:51:11.000Z
|
2021-12-05T00:22:46.000Z
|
src/animator/fadein.py
|
Z3t5upro/AC_Management
|
d9c64177312d8c3eebc5e05124fd372bfe42d000
|
[
"MIT"
] | 8
|
2019-11-17T07:16:47.000Z
|
2021-10-06T04:32:22.000Z
|
from kivy.animation import Animation
from functools import partial
from .base import Animator
# Public API of this module: the fade-in animator family.
__all__ = (
    "FadeInAnimator",
    "FadeInDownAnimator",
    "FadeInLeftAnimator",
    "FadeInRightAnimator",
    "FadeInUpAnimator",
)
# fade in
class FadeInAnimator(Animator):
    """Fade the widget in by animating its opacity from 0 to 1."""

    def start_(self, tmp=None):
        # Snap to fully transparent first, then animate back to opaque
        # over the configured duration.
        self._initialize(opacity=0)
        fade = Animation(d=self.duration, opacity=1)
        fade.cancel_all(self.widget)
        fade.start(self.widget)
        fade.bind(on_complete=partial(self.anim_complete, self))
class FadeInDownAnimator(Animator):
    """Fade in while sliding down from slightly above the resting position."""

    def start_(self, tmp=None):
        # Build the start pose: every vertical pos_hint key shifted up by
        # 0.2; all other keys kept as-is.
        start_hint = {
            key: (val + 0.2 if key in ("center_y", "y", "top") else val)
            for key, val in self._original["pos_hint"].items()
        }
        self._initialize(opacity=0, pos_hint=start_hint)
        # Animate opacity and position back to the original values.
        slide = Animation(d=self.duration, opacity=1,
                          pos_hint=self._original["pos_hint"])
        slide.cancel_all(self.widget)
        slide.start(self.widget)
        slide.bind(on_complete=partial(self.anim_complete, self))
class FadeInLeftAnimator(Animator):
    """Fade in while sliding in from the left of the resting position."""

    def start_(self, tmp=None):
        # Start pose: horizontal pos_hint keys shifted left by 0.2.
        start_hint = {
            key: (val - 0.2 if key in ("center_x", "x", "left") else val)
            for key, val in self._original["pos_hint"].items()
        }
        self._initialize(opacity=0, pos_hint=start_hint)
        # Animate opacity and position back to the original values.
        slide = Animation(d=self.duration, opacity=1,
                          pos_hint=self._original["pos_hint"])
        slide.cancel_all(self.widget)
        slide.start(self.widget)
        slide.bind(on_complete=partial(self.anim_complete, self))
class FadeInRightAnimator(Animator):
    """Fade in while sliding in from the right of the resting position."""

    def start_(self, tmp=None):
        # Start pose: horizontal pos_hint keys shifted right by 0.2.
        # NOTE(review): the key list ("center_x", "x", "left") mirrors the
        # Left animator's; a widget positioned via a "right" pos_hint key
        # would not be offset -- confirm whether that is intended.
        start_hint = {
            key: (val + 0.2 if key in ("center_x", "x", "left") else val)
            for key, val in self._original["pos_hint"].items()
        }
        self._initialize(opacity=0, pos_hint=start_hint)
        # Animate opacity and position back to the original values.
        slide = Animation(d=self.duration, opacity=1,
                          pos_hint=self._original["pos_hint"])
        slide.cancel_all(self.widget)
        slide.start(self.widget)
        slide.bind(on_complete=partial(self.anim_complete, self))
class FadeInUpAnimator(Animator):
    """Fade in while sliding up from slightly below the resting position."""

    def start_(self, tmp=None):
        # Start pose: vertical pos_hint keys shifted down by 0.2.
        start_hint = {
            key: (val - 0.2 if key in ("center_y", "y", "top") else val)
            for key, val in self._original["pos_hint"].items()
        }
        self._initialize(opacity=0, pos_hint=start_hint)
        # Animate opacity and position back to the original values.
        slide = Animation(d=self.duration, opacity=1,
                          pos_hint=self._original["pos_hint"])
        slide.cancel_all(self.widget)
        slide.start(self.widget)
        slide.bind(on_complete=partial(self.anim_complete, self))
| 27.553719
| 68
| 0.553989
| 388
| 3,334
| 4.541237
| 0.136598
| 0.047673
| 0.068104
| 0.090806
| 0.847333
| 0.847333
| 0.847333
| 0.847333
| 0.805335
| 0.805335
| 0
| 0.007699
| 0.29874
| 3,334
| 120
| 69
| 27.783333
| 0.745937
| 0.0021
| 0
| 0.755556
| 0
| 0
| 0.08
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.033333
| 0
| 0.144444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
661355e19611e5c35bc39aec5dc57bb7fcc1f23a
| 12,359
|
py
|
Python
|
robot_con/nxt/nxtrobot_pb2_grpc.py
|
takuya-ki/wrs
|
f6e1009b94332504042fbde9b39323410394ecde
|
[
"MIT"
] | 23
|
2021-04-02T09:02:04.000Z
|
2022-03-22T05:31:03.000Z
|
robot_con/nxt/nxtrobot_pb2_grpc.py
|
takuya-ki/wrs
|
f6e1009b94332504042fbde9b39323410394ecde
|
[
"MIT"
] | 35
|
2021-04-12T09:41:05.000Z
|
2022-03-26T13:32:46.000Z
|
robot_con/nxt/nxtrobot_pb2_grpc.py
|
takuya-ki/wrs
|
f6e1009b94332504042fbde9b39323410394ecde
|
[
"MIT"
] | 16
|
2021-03-30T11:55:45.000Z
|
2022-03-30T07:10:59.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
import robotconn.rpc.nxtrobot.nxtrobot_pb2 as nxtrobot__pb2
class NxtStub(object):
    """Client-side stub for the Nxt gRPC service.

    (No documentation comment was present in the .proto file.)
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        empty = nxtrobot__pb2.Empty
        send_value = nxtrobot__pb2.SendValue
        status = nxtrobot__pb2.Status
        return_value = nxtrobot__pb2.ReturnValue
        # One (method name, request message, response message) row per RPC
        # exposed by the Nxt service.
        rpc_table = [
            ('checkEncoders', empty, status),
            ('servoOn', empty, status),
            ('servoOff', empty, status),
            ('goInitial', empty, status),
            ('goOffPose', empty, status),
            ('getJointAngles', empty, return_value),
            ('setJointAngles', send_value, status),
            ('playPattern', send_value, status),
            ('closeHandToolLft', empty, status),
            ('closeHandToolRgt', empty, status),
            ('openHandToolLft', empty, status),
            ('openHandToolRgt', empty, status),
            ('ejectHandToolLft', empty, status),
            ('ejectHandToolRgt', empty, status),
            ('attachHandToolLft', empty, status),
            ('attachHandToolRgt', empty, status),
        ]
        for method, request_cls, response_cls in rpc_table:
            setattr(self, method, channel.unary_unary(
                '/Nxt/' + method,
                request_serializer=request_cls.SerializeToString,
                response_deserializer=response_cls.FromString,
            ))
class NxtServicer(object):
    """Server-side base class for the Nxt gRPC service.

    Every handler is an unimplemented placeholder; subclass and override
    the methods to provide real behaviour.
    (No documentation comment was present in the .proto file.)
    """

    def _unimplemented(self, context):
        # Shared placeholder body: report UNIMPLEMENTED to the client and
        # raise locally so accidental direct calls also fail loudly.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def checkEncoders(self, request, context):
        self._unimplemented(context)

    def servoOn(self, request, context):
        self._unimplemented(context)

    def servoOff(self, request, context):
        self._unimplemented(context)

    def goInitial(self, request, context):
        self._unimplemented(context)

    def goOffPose(self, request, context):
        self._unimplemented(context)

    def getJointAngles(self, request, context):
        self._unimplemented(context)

    def setJointAngles(self, request, context):
        self._unimplemented(context)

    def playPattern(self, request, context):
        self._unimplemented(context)

    def closeHandToolLft(self, request, context):
        self._unimplemented(context)

    def closeHandToolRgt(self, request, context):
        self._unimplemented(context)

    def openHandToolLft(self, request, context):
        self._unimplemented(context)

    def openHandToolRgt(self, request, context):
        self._unimplemented(context)

    def ejectHandToolLft(self, request, context):
        self._unimplemented(context)

    def ejectHandToolRgt(self, request, context):
        self._unimplemented(context)

    def attachHandToolLft(self, request, context):
        self._unimplemented(context)

    def attachHandToolRgt(self, request, context):
        self._unimplemented(context)
def add_NxtServicer_to_server(servicer, server):
    """Register a NxtServicer implementation with a grpc server."""
    empty = nxtrobot__pb2.Empty
    send_value = nxtrobot__pb2.SendValue
    status = nxtrobot__pb2.Status
    return_value = nxtrobot__pb2.ReturnValue
    # One (method name, request message, response message) row per RPC
    # exposed by the Nxt service.
    rpc_table = [
        ('checkEncoders', empty, status),
        ('servoOn', empty, status),
        ('servoOff', empty, status),
        ('goInitial', empty, status),
        ('goOffPose', empty, status),
        ('getJointAngles', empty, return_value),
        ('setJointAngles', send_value, status),
        ('playPattern', send_value, status),
        ('closeHandToolLft', empty, status),
        ('closeHandToolRgt', empty, status),
        ('openHandToolLft', empty, status),
        ('openHandToolRgt', empty, status),
        ('ejectHandToolLft', empty, status),
        ('ejectHandToolRgt', empty, status),
        ('attachHandToolLft', empty, status),
        ('attachHandToolRgt', empty, status),
    ]
    rpc_method_handlers = {
        method: grpc.unary_unary_rpc_method_handler(
            getattr(servicer, method),
            request_deserializer=request_cls.FromString,
            response_serializer=response_cls.SerializeToString,
        )
        for method, request_cls, response_cls in rpc_table
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'Nxt', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
| 40.923841
| 74
| 0.736872
| 1,202
| 12,359
| 7.305324
| 0.067388
| 0.082679
| 0.076529
| 0.075846
| 0.795695
| 0.795695
| 0.795695
| 0.726455
| 0.726455
| 0.726455
| 0
| 0.006544
| 0.183915
| 12,359
| 301
| 75
| 41.059801
| 0.864069
| 0.090865
| 0
| 0.56917
| 1
| 0
| 0.111171
| 0.01143
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071146
| false
| 0.071146
| 0.007905
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
b09058614bd6cf3c3076c21268ececbb42327a30
| 38
|
py
|
Python
|
SWEA/test.py
|
Minoolian/Coding_Test
|
5eb781b880919733b07d6a0313f592a6a9ac117a
|
[
"MIT"
] | null | null | null |
SWEA/test.py
|
Minoolian/Coding_Test
|
5eb781b880919733b07d6a0313f592a6a9ac117a
|
[
"MIT"
] | null | null | null |
SWEA/test.py
|
Minoolian/Coding_Test
|
5eb781b880919733b07d6a0313f592a6a9ac117a
|
[
"MIT"
] | null | null | null |
# Tiny demo: build three consecutive [i, i+1] pairs and print the numbers
# of the last pair, space-separated (unpacked into print's arguments).
a = [[i, i + 1] for i in (2, 3, 4)]  # == [[2, 3], [3, 4], [4, 5]]
print(*a[-1])
| 12.666667
| 23
| 0.368421
| 10
| 38
| 1.4
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205882
| 0.105263
| 38
| 3
| 24
| 12.666667
| 0.205882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
b0cdeba38c48d4be4a4c968a7e6251c24ceb6bc8
| 16,931
|
py
|
Python
|
sdk/python/pulumi_akamai/gtm_cidrmap.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2021-01-21T15:22:12.000Z
|
2021-08-25T14:15:29.000Z
|
sdk/python/pulumi_akamai/gtm_cidrmap.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | 59
|
2020-08-13T14:39:36.000Z
|
2022-03-31T15:19:48.000Z
|
sdk/python/pulumi_akamai/gtm_cidrmap.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
# Public exports of this module.
__all__ = ['GtmCidrmapArgs', 'GtmCidrmap']
@pulumi.input_type
class GtmCidrmapArgs:
    def __init__(__self__, *,
                 default_datacenter: pulumi.Input['GtmCidrmapDefaultDatacenterArgs'],
                 domain: pulumi.Input[str],
                 assignments: Optional[pulumi.Input[Sequence[pulumi.Input['GtmCidrmapAssignmentArgs']]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 wait_on_complete: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a GtmCidrmap resource.

        :param default_datacenter: Datacenter handling all CIDR zones not
               matched by an assignment (required).
        :param domain: GTM domain name for the map (required).
        :param assignments: CIDR zone groupings of CIDR blocks (optional).
        :param name: Descriptive label for the CIDR map (optional).
        :param wait_on_complete: If True, wait for the transaction to
               complete (optional).
        """
        # Required fields are always stored; optional fields only when the
        # caller supplied them, so unset properties stay absent.
        pulumi.set(__self__, "default_datacenter", default_datacenter)
        pulumi.set(__self__, "domain", domain)
        if assignments is not None:
            pulumi.set(__self__, "assignments", assignments)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if wait_on_complete is not None:
            pulumi.set(__self__, "wait_on_complete", wait_on_complete)

    @property
    @pulumi.getter(name="defaultDatacenter")
    def default_datacenter(self) -> pulumi.Input['GtmCidrmapDefaultDatacenterArgs']:
        """Datacenter handling all CIDR zones not matched by an assignment."""
        return pulumi.get(self, "default_datacenter")

    @default_datacenter.setter
    def default_datacenter(self, value: pulumi.Input['GtmCidrmapDefaultDatacenterArgs']):
        pulumi.set(self, "default_datacenter", value)

    @property
    @pulumi.getter
    def domain(self) -> pulumi.Input[str]:
        """GTM domain name for the map."""
        return pulumi.get(self, "domain")

    @domain.setter
    def domain(self, value: pulumi.Input[str]):
        pulumi.set(self, "domain", value)

    @property
    @pulumi.getter
    def assignments(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['GtmCidrmapAssignmentArgs']]]]:
        """CIDR zone groupings of CIDR blocks."""
        return pulumi.get(self, "assignments")

    @assignments.setter
    def assignments(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['GtmCidrmapAssignmentArgs']]]]):
        pulumi.set(self, "assignments", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """Descriptive label for the CIDR map."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="waitOnComplete")
    def wait_on_complete(self) -> Optional[pulumi.Input[bool]]:
        """Whether to wait for the transaction to complete."""
        return pulumi.get(self, "wait_on_complete")

    @wait_on_complete.setter
    def wait_on_complete(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "wait_on_complete", value)
@pulumi.input_type
class _GtmCidrmapState:
    def __init__(__self__, *,
                 assignments: Optional[pulumi.Input[Sequence[pulumi.Input['GtmCidrmapAssignmentArgs']]]] = None,
                 default_datacenter: Optional[pulumi.Input['GtmCidrmapDefaultDatacenterArgs']] = None,
                 domain: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 wait_on_complete: Optional[pulumi.Input[bool]] = None):
        """
        Input properties used for looking up and filtering GtmCidrmap resources.

        All fields are optional here (unlike GtmCidrmapArgs) because a state
        lookup may constrain any subset of the resource's properties.
        """
        if assignments is not None:
            pulumi.set(__self__, "assignments", assignments)
        if default_datacenter is not None:
            pulumi.set(__self__, "default_datacenter", default_datacenter)
        if domain is not None:
            pulumi.set(__self__, "domain", domain)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if wait_on_complete is not None:
            pulumi.set(__self__, "wait_on_complete", wait_on_complete)

    @property
    @pulumi.getter
    def assignments(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['GtmCidrmapAssignmentArgs']]]]:
        """CIDR zone groupings of CIDR blocks."""
        return pulumi.get(self, "assignments")

    @assignments.setter
    def assignments(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['GtmCidrmapAssignmentArgs']]]]):
        pulumi.set(self, "assignments", value)

    @property
    @pulumi.getter(name="defaultDatacenter")
    def default_datacenter(self) -> Optional[pulumi.Input['GtmCidrmapDefaultDatacenterArgs']]:
        """Datacenter handling all CIDR zones not matched by an assignment."""
        return pulumi.get(self, "default_datacenter")

    @default_datacenter.setter
    def default_datacenter(self, value: Optional[pulumi.Input['GtmCidrmapDefaultDatacenterArgs']]):
        pulumi.set(self, "default_datacenter", value)

    @property
    @pulumi.getter
    def domain(self) -> Optional[pulumi.Input[str]]:
        """GTM domain name for the map."""
        return pulumi.get(self, "domain")

    @domain.setter
    def domain(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "domain", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """Descriptive label for the CIDR map."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="waitOnComplete")
    def wait_on_complete(self) -> Optional[pulumi.Input[bool]]:
        """Whether to wait for the transaction to complete."""
        return pulumi.get(self, "wait_on_complete")

    @wait_on_complete.setter
    def wait_on_complete(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "wait_on_complete", value)
class GtmCidrmap(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
assignments: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GtmCidrmapAssignmentArgs']]]]] = None,
default_datacenter: Optional[pulumi.Input[pulumi.InputType['GtmCidrmapDefaultDatacenterArgs']]] = None,
domain: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
wait_on_complete: Optional[pulumi.Input[bool]] = None,
__props__=None):
"""
Use the `GtmCidrmap` resource to create, configure, and import a GTM Classless Inter-Domain Routing (CIDR) map. CIDR mapping uses the IP addresses of the requesting name server to provide IP-specific CNAME entries. CNAMEs let you direct internal users to a specific environment or direct them to the origin. This lets you provide different responses to an internal corporate DNS infrastructure, such as internal test environments and another answer for all other name servers (`default_datacenter`).
CIDR maps split the Internet into multiple CIDR block zones. Properties that use a map can specify a handout CNAME for each zone on the property's editing page. To configure a property for CIDR mapping, your domain needs at least one CIDR map defined.
> **Note** Import requires an ID with this format: `existing_domain_name`:`existing_map_name`.
## Example Usage
Basic usage:
```python
import pulumi
import pulumi_akamai as akamai
demo_cidrmap = akamai.GtmCidrmap("demoCidrmap",
default_datacenter=akamai.GtmCidrmapDefaultDatacenterArgs(
datacenter_id=5400,
nickname="All Other CIDR Blocks",
),
domain="demo_domain.akadns.net")
```
## Argument reference
This resource supports these arguments:
* `domain` - (Required) GTM Domain name for the AS Map.
* `name` - (Required) A descriptive label for the CIDR map, up to 255 characters.
* `default_datacenter` - (Required) A placeholder for all other CIDR zones not found in these CIDR zones. Requires these additional arguments:
* `datacenter_id` - (Required) For each property, an identifier for all other CIDR zones.
* `nickname` - (Required) A descriptive label for the all other CIDR blocks.
* `wait_on_complete` - (Optional) A boolean that, if set to `true`, waits for transaction to complete.
* `assignment` - (Optional) Contains information about the CIDR zone groupings of CIDR blocks. You can have multiple entries with this argument. If used, requires these additional arguments:
* `datacenter_id` - (Optional) A unique identifier for an existing data center in the domain.
* `nickname` - (Optional) A descriptive label for the CIDR zone group, up to 256 characters.
* `blocks` - (Optional, list) Specifies an array of CIDR blocks.
## Schema reference
You can download the GTM CIDR Map backing schema from the [Global Traffic Management API](https://developer.akamai.com/api/web_performance/global_traffic_management/v1.html#cidrmap) page.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: GtmCidrmapArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Use the `GtmCidrmap` resource to create, configure, and import a GTM Classless Inter-Domain Routing (CIDR) map. CIDR mapping uses the IP addresses of the requesting name server to provide IP-specific CNAME entries. CNAMEs let you direct internal users to a specific environment or direct them to the origin. This lets you provide different responses to an internal corporate DNS infrastructure, such as internal test environments and another answer for all other name servers (`default_datacenter`).
CIDR maps split the Internet into multiple CIDR block zones. Properties that use a map can specify a handout CNAME for each zone on the property's editing page. To configure a property for CIDR mapping, your domain needs at least one CIDR map defined.
> **Note** Import requires an ID with this format: `existing_domain_name`:`existing_map_name`.
## Example Usage
Basic usage:
```python
import pulumi
import pulumi_akamai as akamai
demo_cidrmap = akamai.GtmCidrmap("demoCidrmap",
default_datacenter=akamai.GtmCidrmapDefaultDatacenterArgs(
datacenter_id=5400,
nickname="All Other CIDR Blocks",
),
domain="demo_domain.akadns.net")
```
## Argument reference
This resource supports these arguments:
* `domain` - (Required) GTM Domain name for the AS Map.
* `name` - (Required) A descriptive label for the CIDR map, up to 255 characters.
* `default_datacenter` - (Required) A placeholder for all other CIDR zones not found in these CIDR zones. Requires these additional arguments:
* `datacenter_id` - (Required) For each property, an identifier for all other CIDR zones.
* `nickname` - (Required) A descriptive label for the all other CIDR blocks.
* `wait_on_complete` - (Optional) A boolean that, if set to `true`, waits for transaction to complete.
* `assignment` - (Optional) Contains information about the CIDR zone groupings of CIDR blocks. You can have multiple entries with this argument. If used, requires these additional arguments:
* `datacenter_id` - (Optional) A unique identifier for an existing data center in the domain.
* `nickname` - (Optional) A descriptive label for the CIDR zone group, up to 256 characters.
* `blocks` - (Optional, list) Specifies an array of CIDR blocks.
## Schema reference
You can download the GTM CIDR Map backing schema from the [Global Traffic Management API](https://developer.akamai.com/api/web_performance/global_traffic_management/v1.html#cidrmap) page.
:param str resource_name: The name of the resource.
:param GtmCidrmapArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(GtmCidrmapArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
assignments: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GtmCidrmapAssignmentArgs']]]]] = None,
default_datacenter: Optional[pulumi.Input[pulumi.InputType['GtmCidrmapDefaultDatacenterArgs']]] = None,
domain: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
wait_on_complete: Optional[pulumi.Input[bool]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = GtmCidrmapArgs.__new__(GtmCidrmapArgs)
__props__.__dict__["assignments"] = assignments
if default_datacenter is None and not opts.urn:
raise TypeError("Missing required property 'default_datacenter'")
__props__.__dict__["default_datacenter"] = default_datacenter
if domain is None and not opts.urn:
raise TypeError("Missing required property 'domain'")
__props__.__dict__["domain"] = domain
__props__.__dict__["name"] = name
__props__.__dict__["wait_on_complete"] = wait_on_complete
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="akamai:trafficmanagement/gtmCidrmap:GtmCidrmap")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(GtmCidrmap, __self__).__init__(
'akamai:index/gtmCidrmap:GtmCidrmap',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
assignments: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['GtmCidrmapAssignmentArgs']]]]] = None,
default_datacenter: Optional[pulumi.Input[pulumi.InputType['GtmCidrmapDefaultDatacenterArgs']]] = None,
domain: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
wait_on_complete: Optional[pulumi.Input[bool]] = None) -> 'GtmCidrmap':
"""
Get an existing GtmCidrmap resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _GtmCidrmapState.__new__(_GtmCidrmapState)
__props__.__dict__["assignments"] = assignments
__props__.__dict__["default_datacenter"] = default_datacenter
__props__.__dict__["domain"] = domain
__props__.__dict__["name"] = name
__props__.__dict__["wait_on_complete"] = wait_on_complete
return GtmCidrmap(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def assignments(self) -> pulumi.Output[Optional[Sequence['outputs.GtmCidrmapAssignment']]]:
    """Resolved value of the ``assignments`` output property."""
    resolved = pulumi.get(self, "assignments")
    return resolved
@property
@pulumi.getter(name="defaultDatacenter")
def default_datacenter(self) -> pulumi.Output['outputs.GtmCidrmapDefaultDatacenter']:
    """Resolved value of the ``defaultDatacenter`` output property."""
    resolved = pulumi.get(self, "default_datacenter")
    return resolved
@property
@pulumi.getter
def domain(self) -> pulumi.Output[str]:
    """Resolved value of the ``domain`` output property."""
    resolved = pulumi.get(self, "domain")
    return resolved
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """Resolved value of the ``name`` output property."""
    resolved = pulumi.get(self, "name")
    return resolved
@property
@pulumi.getter(name="waitOnComplete")
def wait_on_complete(self) -> pulumi.Output[Optional[bool]]:
    """Resolved value of the ``waitOnComplete`` output property."""
    resolved = pulumi.get(self, "wait_on_complete")
    return resolved
| 47.963173
| 507
| 0.673616
| 1,933
| 16,931
| 5.699948
| 0.135541
| 0.057905
| 0.067254
| 0.029951
| 0.823561
| 0.803776
| 0.777727
| 0.751407
| 0.727718
| 0.708568
| 0
| 0.001767
| 0.23141
| 16,931
| 352
| 508
| 48.099432
| 0.844924
| 0.34617
| 0
| 0.682692
| 1
| 0
| 0.143515
| 0.061164
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0.052885
| 0.033654
| 0.072115
| 0.278846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
b0da0635aa3677b477b1fc230cff5846796d62ba
| 1,761
|
py
|
Python
|
netbox/ipam/migrations/0036_standardize_description.py
|
aslafy-z/netbox
|
a5512dd4c46c005df8752fc330c1382ac22b31ea
|
[
"Apache-2.0"
] | 2
|
2021-06-02T03:00:05.000Z
|
2021-07-30T18:52:32.000Z
|
netbox/ipam/migrations/0036_standardize_description.py
|
emersonfelipesp/netbox
|
fecca5ad83fb6b48a2f15982dfd3242653f105f9
|
[
"Apache-2.0"
] | 25
|
2019-09-17T19:40:50.000Z
|
2022-03-11T04:01:55.000Z
|
netbox/ipam/migrations/0036_standardize_description.py
|
emersonfelipesp/netbox
|
fecca5ad83fb6b48a2f15982dfd3242653f105f9
|
[
"Apache-2.0"
] | 1
|
2018-12-05T12:03:21.000Z
|
2018-12-05T12:03:21.000Z
|
# Generated by Django 3.0.3 on 2020-03-13 20:27
from django.db import migrations, models
class Migration(migrations.Migration):
    """Give every IPAM model the same ``description`` field definition.

    ``rir`` and ``vlangroup`` gain a brand-new field; the remaining models
    already have one and only its definition is altered. All nine end up
    with an identical ``CharField(blank=True, max_length=200)``.
    """

    dependencies = [
        ('ipam', '0035_drop_ip_family'),
    ]

    # NOTE: literals are repeated inside the comprehensions because a
    # class-body comprehension cannot see other class-level names.
    operations = [
        migrations.AddField(
            model_name=added_model,
            name='description',
            field=models.CharField(blank=True, max_length=200),
        )
        for added_model in ('rir', 'vlangroup')
    ] + [
        migrations.AlterField(
            model_name=altered_model,
            name='description',
            field=models.CharField(blank=True, max_length=200),
        )
        for altered_model in (
            'aggregate', 'ipaddress', 'prefix', 'role',
            'service', 'vlan', 'vrf',
        )
    ]
| 29.847458
| 63
| 0.555934
| 164
| 1,761
| 5.841463
| 0.280488
| 0.084551
| 0.187891
| 0.244259
| 0.748434
| 0.748434
| 0.748434
| 0.748434
| 0.748434
| 0.748434
| 0
| 0.038819
| 0.327087
| 1,761
| 58
| 64
| 30.362069
| 0.76962
| 0.025554
| 0
| 0.692308
| 1
| 0
| 0.102684
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019231
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b0e6ccd81a588b62815eb12c5e42b36ac642464b
| 739
|
py
|
Python
|
cool/util/pack.py
|
d2verb/cool
|
8d9f3d3a89877de7941d4a7a32e38a0be6bd050e
|
[
"MIT"
] | 4
|
2020-07-17T11:37:28.000Z
|
2022-01-25T04:52:01.000Z
|
cool/util/pack.py
|
d2verb/cool
|
8d9f3d3a89877de7941d4a7a32e38a0be6bd050e
|
[
"MIT"
] | 1
|
2020-07-02T15:30:36.000Z
|
2020-07-02T15:30:36.000Z
|
cool/util/pack.py
|
d2verb/ctftools
|
8d9f3d3a89877de7941d4a7a32e38a0be6bd050e
|
[
"MIT"
] | 1
|
2020-09-29T16:45:29.000Z
|
2020-09-29T16:45:29.000Z
|
def p16(number: int, endian: str = "little", signed: bool = False) -> bytes:
    """Pack *number* into exactly 2 bytes.

    Generalized to accept ``signed=True`` for negative values, mirroring the
    ``u16`` unpacker's signature (backward compatible: default is unchanged).

    :param number: integer to pack; raises OverflowError if it does not fit.
    :param endian: byte order, ``"little"`` (default) or ``"big"``.
    :param signed: use two's-complement encoding when True.
    """
    return number.to_bytes(2, byteorder=endian, signed=signed)
def p32(number: int, endian: str = "little", signed: bool = False) -> bytes:
    """Pack *number* into exactly 4 bytes.

    Generalized to accept ``signed=True`` for negative values, mirroring the
    ``u32`` unpacker's signature (backward compatible: default is unchanged).

    :param number: integer to pack; raises OverflowError if it does not fit.
    :param endian: byte order, ``"little"`` (default) or ``"big"``.
    :param signed: use two's-complement encoding when True.
    """
    return number.to_bytes(4, byteorder=endian, signed=signed)
def p64(number: int, endian: str = "little", signed: bool = False) -> bytes:
    """Pack *number* into exactly 8 bytes.

    Generalized to accept ``signed=True`` for negative values, mirroring the
    ``u64`` unpacker's signature (backward compatible: default is unchanged).

    :param number: integer to pack; raises OverflowError if it does not fit.
    :param endian: byte order, ``"little"`` (default) or ``"big"``.
    :param signed: use two's-complement encoding when True.
    """
    return number.to_bytes(8, byteorder=endian, signed=signed)
def u16(data: bytes, endian: str = "little", signed: bool = False) -> int:
    """Unpack *data* (nominally 2 bytes) into an integer.

    :param data: raw bytes to decode.
    :param endian: byte order, ``"little"`` (default) or ``"big"``.
    :param signed: interpret the bytes as two's complement when True.
    """
    value = int.from_bytes(data, endian, signed=signed)
    return value
def u32(data: bytes, endian: str = "little", signed: bool = False) -> int:
    """Unpack *data* (nominally 4 bytes) into an integer.

    :param data: raw bytes to decode.
    :param endian: byte order, ``"little"`` (default) or ``"big"``.
    :param signed: interpret the bytes as two's complement when True.
    """
    value = int.from_bytes(data, endian, signed=signed)
    return value
def u64(data: bytes, endian: str = "little", signed: bool = False) -> int:
    """Unpack *data* (nominally 8 bytes) into an integer.

    :param data: raw bytes to decode.
    :param endian: byte order, ``"little"`` (default) or ``"big"``.
    :param signed: interpret the bytes as two's complement when True.
    """
    value = int.from_bytes(data, endian, signed=signed)
    return value
| 32.130435
| 74
| 0.686062
| 105
| 739
| 4.771429
| 0.228571
| 0.107784
| 0.179641
| 0.107784
| 0.844311
| 0.844311
| 0.844311
| 0.844311
| 0.844311
| 0.844311
| 0
| 0.024272
| 0.163735
| 739
| 22
| 75
| 33.590909
| 0.786408
| 0
| 0
| 0.25
| 0
| 0
| 0.048714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
b0f8643750a5c22a0fbfc91abe28f8af50458aac
| 277
|
py
|
Python
|
drf_writable_nested/serializers.py
|
Lagyu/drf-writable-nested
|
4382f240d9461f4d070d4b4b5df62bd3b4731467
|
[
"BSD-2-Clause"
] | null | null | null |
drf_writable_nested/serializers.py
|
Lagyu/drf-writable-nested
|
4382f240d9461f4d070d4b4b5df62bd3b4731467
|
[
"BSD-2-Clause"
] | null | null | null |
drf_writable_nested/serializers.py
|
Lagyu/drf-writable-nested
|
4382f240d9461f4d070d4b4b5df62bd3b4731467
|
[
"BSD-2-Clause"
] | null | null | null |
from rest_framework import serializers
from .mixins import NestedCreateMixin, NestedUpdateMixin, NestedGetOrCreateMixin
class WritableNestedModelSerializer(NestedGetOrCreateMixin, NestedUpdateMixin,
                                    serializers.ModelSerializer):
    """ModelSerializer assembled purely from the nested-write mixins.

    All behavior comes from ``NestedGetOrCreateMixin`` and
    ``NestedUpdateMixin`` (imported above); this class adds no logic of
    its own and exists only as a convenient ready-made combination.
    """
    pass
| 30.777778
| 80
| 0.758123
| 18
| 277
| 11.611111
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.209386
| 277
| 8
| 81
| 34.625
| 0.954338
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
b02eaeae1a6b6999c3580fdc4710cbde3a81f884
| 86
|
py
|
Python
|
icarus_simulator/strategies/zone_select/__init__.py
|
RubenFr/ICARUS-framework
|
e57a1f50c3bb9522b2a279fee6b625628afd056f
|
[
"MIT"
] | 5
|
2021-08-31T08:07:41.000Z
|
2022-01-04T02:09:25.000Z
|
icarus_simulator/strategies/zone_select/__init__.py
|
RubenFr/ICARUS-framework
|
e57a1f50c3bb9522b2a279fee6b625628afd056f
|
[
"MIT"
] | 3
|
2021-09-23T09:06:35.000Z
|
2021-12-08T04:53:01.000Z
|
icarus_simulator/strategies/zone_select/__init__.py
|
RubenFr/ICARUS-framework
|
e57a1f50c3bb9522b2a279fee6b625628afd056f
|
[
"MIT"
] | 2
|
2022-01-19T17:50:56.000Z
|
2022-03-06T18:59:41.000Z
|
from .list_zone_strat import ListZoneStrat
from .rand_zone_strat import RandZoneStrat
| 28.666667
| 42
| 0.883721
| 12
| 86
| 6
| 0.666667
| 0.25
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 86
| 2
| 43
| 43
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b04a3adaaf73c09b7b7180a4d9f204c1d9725e6a
| 23,786
|
py
|
Python
|
tests/test_resource_types_base.py
|
itomaldonado/PyTravisCI
|
0b6d4ce84d7b0c596e3174d96753a2c1d8e4b280
|
[
"MIT"
] | 3
|
2020-03-18T09:15:40.000Z
|
2021-12-05T11:09:23.000Z
|
tests/test_resource_types_base.py
|
itomaldonado/PyTravisCI
|
0b6d4ce84d7b0c596e3174d96753a2c1d8e4b280
|
[
"MIT"
] | 4
|
2020-12-12T16:10:13.000Z
|
2021-04-12T07:07:08.000Z
|
tests/test_resource_types_base.py
|
itomaldonado/PyTravisCI
|
0b6d4ce84d7b0c596e3174d96753a2c1d8e4b280
|
[
"MIT"
] | 1
|
2021-03-11T15:33:46.000Z
|
2021-03-11T15:33:46.000Z
|
"""
Just another Python API for Travis CI (API).
A module which provides the tests of our resource types base class.
Author:
Nissar Chababy, @funilrys, contactTATAfunilrysTODTODcom
Project link:
https://github.com/funilrys/PyTravisCI
Project documentation:
https://pytravisci.readthedocs.io/en/latest/
License
::
MIT License
Copyright (c) 2019, 2020 Nissar Chababy
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import copy
import json
from datetime import datetime
from unittest import TestCase
from unittest import main as launch_tests
from PyTravisCI.resource_types.base import ResourceTypesBase
class TestResourceTypesBase(TestCase):
"""
Provides the tests of the resource types base class.
"""
# pylint: disable=too-many-public-methods
given_data: dict = {
"_at_type": "user",
"_at_href": "/user/4549848944894848948949",
"_at_representation": "standard",
"_at_permissions": {"read": True, "sync": True},
"id": 4549848944894848948949,
"login": "foobar",
"name": "Foo Bar",
"github_id": 1148942198798789784897949849484523106,
"vcs_id": "1148942198798789784897949849484523106",
"vcs_type": "GithubUser",
"avatar_url": None,
"education": False,
"allow_migration": False,
"email": "foo@example.org",
"is_syncing": False,
"synced_at": datetime(2020, 10, 14, 15, 53, 8),
"recently_signed_up": False,
"secure_user_hash": None,
"usernames": ["foo", "bar"],
}
given_data_minimal: dict = {
"_at_type": "user",
"_at_href": "/user/4549848944894848948949",
"_at_representation": "minimal",
"id": 4549848944894848948949,
"name": "Foo Bar",
"login": "foobar",
}
pagination_data: dict = {
"_at_type": "branches",
"_at_href": "/repo/891/branches?exists_on_github=true&limit=5",
"_at_representation": "standard",
"_at_pagination": {
"limit": 5,
"offset": 0,
"count": 24,
"is_first": True,
"is_last": False,
"next": {
"_at_href": "/repo/25387022/branches?exists_on_github=true&limit=5&offset=5",
"offset": 5,
"limit": 5,
},
"prev": {
"_at_href": "/repo/25387022/branches?exists_on_github=true&limit=0&offset=5",
"offset": 0,
"limit": 5,
},
"first": {
"_at_href": "/repo/25387022/branches?exists_on_github=true&limit=5",
"offset": 0,
"limit": 5,
},
"last": {
"_at_href": "/repo/25387022/branches?exists_on_github=true&limit=5&offset=20",
"offset": 20,
"limit": 5,
},
},
}
def test_init(self) -> None:
    """Every keyword passed to the constructor must land in ``__dict__``."""
    instance = ResourceTypesBase(**self.given_data)

    for key, expected_value in self.given_data.items():
        self.assertEqual(expected_value, instance.__dict__[key])
def test_getitem(self) -> None:
    """``resource[key]`` must mirror attribute storage for every given key."""
    instance = ResourceTypesBase(**self.given_data)

    for key, expected_value in self.given_data.items():
        self.assertEqual(expected_value, instance[key])
def test_getitem_not_set(self) -> None:
    """Indexing with a key that was never set must raise AttributeError."""
    instance = ResourceTypesBase(**self.given_data)

    with self.assertRaises(AttributeError):
        _ = instance["hello_world"]
def test_getitem_through_iter(self) -> None:
"""
Tests of the getitem method for the case that we declar an
index/attribute to iter through.
"""
given_data = {
"__iter_through__": "users",
"users": [self.given_data for _ in range(3)],
}
resource = ResourceTypesBase(**given_data)
self.assertEqual(given_data["users"][2], resource[2])
def test_repr(self) -> None:
    """repr() must embed the class name and the raw attribute dict."""
    payload = {"hello": "world", "world": "hello"}
    instance = ResourceTypesBase(**payload)

    self.assertEqual(f"<ResourceTypesBase {payload} />", repr(instance))
def test_setattr(self) -> None:
"""
Tests of the setattr method.
"""
expected = "Hello, World!"
resource = ResourceTypesBase(**self.given_data)
# pylint: disable=protected-access
resource._PyTravisCI = "Hello, World!"
actual = resource._PyTravisCI
self.assertEqual(expected, actual)
def test_setattr_overwrite(self) -> None:
"""
Tests of the setattr method for the case that we want to overwrite all
attributes.
"""
to_overwrite = {
"_at_type": "user",
"_at_href": "/user/5555555555555555555555555555555",
"_at_representation": "standard",
"_at_permissions": {"read": True, "sync": True},
"id": 5555555555555555555555555555555,
"login": "foobar",
"name": "Foo Bar",
"github_id": 5555555555555555555555555555555,
"vcs_id": "5555555555555555555555555555555",
"vcs_type": "GithubUser",
"avatar_url": None,
"education": False,
"allow_migration": False,
"email": "foo@example.org",
"is_syncing": False,
"synced_at": "2020-10-14T14:53:08Z",
"recently_signed_up": False,
"secure_user_hash": None,
}
resource = ResourceTypesBase(**self.given_data)
resource.__dict__ = to_overwrite
for index, value in to_overwrite.items():
self.assertEqual(value, resource[index])
def test_setattr_unauthorized(self) -> None:
"""
Tests of the setattr method for the case that we want to manually
overwrite an attribute.
"""
resource = ResourceTypesBase(**self.given_data)
self.assertRaises(AttributeError, lambda: resource.__setattr__("name", "hello"))
def test_setitem(self) -> None:
"""
Tests of the setitem method.
"""
expected = "Hello, World!"
resource = ResourceTypesBase(**self.given_data)
# pylint: disable=protected-access
resource["_PyTravisCI"] = "Hello, World!"
actual = resource["_PyTravisCI"]
self.assertEqual(expected, actual)
def test_setitem_overwrite(self) -> None:
"""
Tests of the item method for the case that we want to overwrite all
attributes.
"""
to_overwrite = {
"_at_type": "user",
"_at_href": "/user/5555555555555555555555555555555",
"_at_representation": "standard",
"_at_permissions": {"read": True, "sync": True},
"id": 5555555555555555555555555555555,
"login": "foobar",
"name": "Foo Bar",
"github_id": 5555555555555555555555555555555,
"vcs_id": "5555555555555555555555555555555",
"vcs_type": "GithubUser",
"avatar_url": None,
"education": False,
"allow_migration": False,
"email": "foo@example.org",
"is_syncing": False,
"synced_at": "2020-10-14T14:53:08Z",
"recently_signed_up": False,
"secure_user_hash": None,
"usernames": ["hehehe", "huhuhu"],
}
resource = ResourceTypesBase(**self.given_data)
resource["__dict__"] = to_overwrite
for index, value in to_overwrite.items():
self.assertEqual(value, resource[index])
def test_setitem_unauthorized(self) -> None:
"""
Tests of the setitem method for the case that we want to manually
overwrite an attribute.
"""
resource = ResourceTypesBase(**self.given_data)
self.assertRaises(AttributeError, lambda: resource.__setitem__("name", "hello"))
def test_iteration(self) -> None:
"""
Tests of the iteration capabilities.
"""
given_data = {
"__iter_through__": "users",
"users": [self.given_data for _ in range(3)],
}
resource = ResourceTypesBase(**given_data)
for index, data in enumerate(resource):
self.assertEqual(given_data["users"][index], data)
def test_iteration_nothing_to_iter(self) -> None:
"""
Tests of the iteration capabilities for the case that there is nothing
to iterate through.
"""
resource = ResourceTypesBase(**self.given_data)
self.assertRaises(NotImplementedError, lambda: enumerate(resource))
def test_equality(self) -> None:
"""
Tests of the equality comparison.
"""
first_resource = ResourceTypesBase(**self.given_data)
second_resource = ResourceTypesBase(**self.given_data)
expected = True
actual = first_resource == second_resource
self.assertEqual(expected, actual)
def test_not_equality(self) -> None:
"""
Tests of the (not) equallity comparison.
"""
given_data = {
"__iter_through__": "users",
"users": [self.given_data for _ in range(3)],
}
first_resource = ResourceTypesBase(**self.given_data)
second_resource = ResourceTypesBase(**given_data)
expected = False
actual = first_resource == second_resource
self.assertEqual(expected, actual)
def test_inequality(self) -> None:
"""
Tests of the inquallity comparison.
"""
given_data = {
"__iter_through__": "users",
"users": [self.given_data for _ in range(3)],
}
first_resource = ResourceTypesBase(**self.given_data)
second_resource = ResourceTypesBase(**given_data)
expected = True
actual = first_resource != second_resource
self.assertEqual(expected, actual)
def test_not_inequality(self) -> None:
"""
Tests of the (not) inquallity comparison.
"""
first_resource = ResourceTypesBase(**self.given_data)
second_resource = ResourceTypesBase(**self.given_data)
expected = False
actual = first_resource != second_resource
self.assertEqual(expected, actual)
def test_json(self) -> None:
"""
Tests of the method which let us convert to JSON.
"""
expected = {
"@type": "user",
"@href": "/user/4549848944894848948949",
"@representation": "standard",
"@permissions": {"read": True, "sync": True},
"id": 4549848944894848948949,
"login": "foobar",
"name": "Foo Bar",
"github_id": 1148942198798789784897949849484523106,
"vcs_id": "1148942198798789784897949849484523106",
"vcs_type": "GithubUser",
"avatar_url": None,
"education": False,
"allow_migration": False,
"email": "foo@example.org",
"is_syncing": False,
"synced_at": "2020-10-14T15:53:08Z",
"recently_signed_up": False,
"secure_user_hash": None,
"usernames": ["foo", "bar"],
}
resource = ResourceTypesBase(**self.given_data)
converted = resource.json()
self.assertIsInstance(converted, str)
self.assertEqual(expected, json.loads(converted))
def test_to_json(self) -> None:
"""
Tests of the method which let us convert to JSON.
"""
expected_base = {
"@type": "user",
"@href": "/user/4549848944894848948949",
"@representation": "standard",
"@permissions": {"read": True, "sync": True},
"id": 4549848944894848948949,
"login": "foobar",
"name": "Foo Bar",
"github_id": 1148942198798789784897949849484523106,
"vcs_id": "1148942198798789784897949849484523106",
"vcs_type": "GithubUser",
"avatar_url": None,
"education": False,
"allow_migration": False,
"email": "foo@example.org",
"is_syncing": False,
"synced_at": "2020-10-14T15:53:08Z",
"recently_signed_up": False,
"secure_user_hash": None,
"usernames": ["foo", "bar"],
}
expected = copy.deepcopy(expected_base)
expected["related_user"] = expected_base
given_data = copy.deepcopy(self.given_data)
given_data["related_user"] = ResourceTypesBase(**given_data)
resource = ResourceTypesBase(**given_data)
converted = resource.to_json()
self.assertIsInstance(converted, str)
self.assertEqual(expected, json.loads(converted))
def test_to_json_tags_not_wanted(self) -> None:
"""
Tests of the method which let us convert to JSON for the case that
we don't want any tags.
"""
expected = {
"id": 4549848944894848948949,
"login": "foobar",
"name": "Foo Bar",
"github_id": 1148942198798789784897949849484523106,
"vcs_id": "1148942198798789784897949849484523106",
"vcs_type": "GithubUser",
"avatar_url": None,
"education": False,
"allow_migration": False,
"email": "foo@example.org",
"is_syncing": False,
"synced_at": "2020-10-14T15:53:08Z",
"recently_signed_up": False,
"secure_user_hash": None,
"usernames": ["foo", "bar"],
}
resource = ResourceTypesBase(**self.given_data)
converted = resource.to_json(remove_tags=True)
self.assertIsInstance(converted, str)
self.assertEqual(expected, json.loads(converted))
def test_dict(self) -> None:
"""
Tests of the method which let us convert to :py:class:`dict`.
"""
expected = {
"@type": "user",
"@href": "/user/4549848944894848948949",
"@representation": "standard",
"@permissions": {"read": True, "sync": True},
"id": 4549848944894848948949,
"login": "foobar",
"name": "Foo Bar",
"github_id": 1148942198798789784897949849484523106,
"vcs_id": "1148942198798789784897949849484523106",
"vcs_type": "GithubUser",
"avatar_url": None,
"education": False,
"allow_migration": False,
"email": "foo@example.org",
"is_syncing": False,
"synced_at": "2020-10-14T15:53:08Z",
"recently_signed_up": False,
"secure_user_hash": None,
"usernames": ["foo", "bar"],
}
resource = ResourceTypesBase(**self.given_data)
actual = resource.dict()
self.assertIsInstance(actual, dict)
self.assertEqual(expected, actual)
def test_to_dict(self) -> None:
"""
Tests of the method which let us convert to :py:class:`dict`.
"""
expected = {
"@type": "user",
"@href": "/user/4549848944894848948949",
"@representation": "standard",
"@permissions": {"read": True, "sync": True},
"id": 4549848944894848948949,
"login": "foobar",
"name": "Foo Bar",
"github_id": 1148942198798789784897949849484523106,
"vcs_id": "1148942198798789784897949849484523106",
"vcs_type": "GithubUser",
"avatar_url": None,
"education": False,
"allow_migration": False,
"email": "foo@example.org",
"is_syncing": False,
"synced_at": "2020-10-14T15:53:08Z",
"recently_signed_up": False,
"secure_user_hash": None,
"usernames": ["foo", "bar"],
}
resource = ResourceTypesBase(**self.given_data)
actual = resource.to_dict()
self.assertIsInstance(actual, dict)
self.assertEqual(expected, actual)
def test_to_dict_deep_relationships(self) -> None:
"""
Tests of the method which let us convert to :py:class:`dict` for the
case that we have some nested resources.
"""
expected_base = {
"@type": "user",
"@href": "/user/4549848944894848948949",
"@representation": "standard",
"@permissions": {"read": True, "sync": True},
"id": 4549848944894848948949,
"login": "foobar",
"name": "Foo Bar",
"github_id": 1148942198798789784897949849484523106,
"vcs_id": "1148942198798789784897949849484523106",
"vcs_type": "GithubUser",
"avatar_url": None,
"education": False,
"allow_migration": False,
"email": "foo@example.org",
"is_syncing": False,
"synced_at": "2020-10-14T15:53:08Z",
"recently_signed_up": False,
"secure_user_hash": None,
"usernames": ["foo", "bar"],
}
expected = copy.deepcopy(expected_base)
expected["related_user"] = copy.deepcopy(expected_base)
given_data = copy.deepcopy(self.given_data)
given_data["related_user"] = ResourceTypesBase(**given_data)
resource = ResourceTypesBase(**given_data)
actual = resource.to_dict()
self.assertIsInstance(actual, dict)
self.assertEqual(expected, actual)
def test_to_dict_tags_not_wanted(self) -> None:
"""
Tests of the method which let us convert to :py:class:`dict` for the
case that we don't want any tags.
"""
expected = {
"id": 4549848944894848948949,
"login": "foobar",
"name": "Foo Bar",
"github_id": 1148942198798789784897949849484523106,
"vcs_id": "1148942198798789784897949849484523106",
"vcs_type": "GithubUser",
"avatar_url": None,
"education": False,
"allow_migration": False,
"email": "foo@example.org",
"is_syncing": False,
"synced_at": "2020-10-14T15:53:08Z",
"recently_signed_up": False,
"secure_user_hash": None,
"usernames": ["foo", "bar"],
}
given_data = copy.deepcopy(self.given_data)
given_data["_PyTravisCI"] = {"hello": "world"}
resource = ResourceTypesBase(**given_data)
actual = resource.to_dict(remove_tags=True)
self.assertIsInstance(actual, dict)
self.assertEqual(expected, actual)
def test_has_next_page(self):
"""
Tests of the method which let us check if the current resource has
a next page.
"""
resource = ResourceTypesBase(**self.pagination_data)
expected = True
actual = resource.has_next_page()
self.assertEqual(expected, actual)
def test_has_next_page_not(self):
"""
Tests of the methods which let us check if the current resource has
a next page for the case that there is no pager.
"""
resource = ResourceTypesBase(**self.given_data)
expected = False
actual = resource.has_next_page()
self.assertEqual(expected, actual)
def test_has_previous_page(self):
"""
Tests of the method which let us check if the current resource has
a previous page.
"""
resource = ResourceTypesBase(**self.pagination_data)
expected = True
actual = resource.has_previous_page()
self.assertEqual(expected, actual)
def test_has_previous_page_not(self):
"""
Tests of the methods which let us check if the current resource has
a previous page for the case that there is no pager.
"""
resource = ResourceTypesBase(**self.given_data)
expected = False
actual = resource.has_previous_page()
self.assertEqual(expected, actual)
def test_has_first_page(self):
"""
Tests of the method which let us check if the current resource has
a first page.
"""
resource = ResourceTypesBase(**self.pagination_data)
expected = True
actual = resource.has_first_page()
self.assertEqual(expected, actual)
def test_has_first_page_not(self):
"""
Tests of the methods which let us check if the current resource has
a previous page for the case that there is no pager.
"""
resource = ResourceTypesBase(**self.given_data)
expected = False
actual = resource.has_first_page()
self.assertEqual(expected, actual)
def test_has_last_page(self):
"""
Tests of the method which let us check if the current resource has
a last page.
"""
resource = ResourceTypesBase(**self.pagination_data)
expected = True
actual = resource.has_last_page()
self.assertEqual(expected, actual)
def test_has_last_page_not(self):
"""
Tests of the methods which let us check if the current resource has
a last page for the case that there is no pager.
"""
resource = ResourceTypesBase(**self.given_data)
expected = False
actual = resource.has_last_page()
self.assertEqual(expected, actual)
def test_is_incomplete(self):
"""
Tests of the method which let us check if the current resource is
incomplete.
"""
resource = ResourceTypesBase(**self.given_data_minimal)
expected = True
actual = resource.is_incomplete()
self.assertEqual(expected, actual)
def test_is_not_incomplete(self):
"""
Tests of the method which let us check if the current resource is
incomplete for the case that it is not actually incomplete.
"""
resource = ResourceTypesBase(**self.given_data)
expected = False
actual = resource.is_incomplete()
self.assertEqual(expected, actual)
if __name__ == "__main__":
launch_tests()
| 31.133508
| 94
| 0.58257
| 2,394
| 23,786
| 5.605681
| 0.124896
| 0.040909
| 0.02608
| 0.065872
| 0.826751
| 0.814307
| 0.793741
| 0.76848
| 0.755961
| 0.745231
| 0
| 0.084954
| 0.306189
| 23,786
| 763
| 95
| 31.174312
| 0.728231
| 0.174136
| 0
| 0.745455
| 0
| 0
| 0.213203
| 0.049044
| 0
| 0
| 0
| 0
| 0.093182
| 1
| 0.077273
| false
| 0
| 0.013636
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b0644b5f5f77d44391ad97da6f2318d7995887ec
| 144
|
py
|
Python
|
logrun/utils/__init__.py
|
dccsillag/exlog
|
8e235dbd602093352a2001388381a1233ea09974
|
[
"MIT"
] | 2
|
2021-11-22T17:28:25.000Z
|
2021-11-22T18:11:30.000Z
|
logrun/utils/__init__.py
|
dccsillag/exlog
|
8e235dbd602093352a2001388381a1233ea09974
|
[
"MIT"
] | 1
|
2021-03-18T22:57:16.000Z
|
2021-03-19T13:57:28.000Z
|
logrun/utils/__init__.py
|
dccsillag/logrun
|
8e235dbd602093352a2001388381a1233ea09974
|
[
"MIT"
] | null | null | null |
"""
This subpackage contains the functionality you will use most of the time.
"""
from logrun.utils import general
from logrun.utils import ml
| 20.571429
| 73
| 0.777778
| 22
| 144
| 5.090909
| 0.772727
| 0.178571
| 0.267857
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159722
| 144
| 6
| 74
| 24
| 0.92562
| 0.506944
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
05c6da54a17b1978448cdbae9278def684c743c8
| 26,782
|
py
|
Python
|
test_nagiosnotify_slack.py
|
wendelfleming/NagiosNotify
|
84544fb513bd5e6a29dfabbe9314e933daa2f5b5
|
[
"Apache-2.0"
] | null | null | null |
test_nagiosnotify_slack.py
|
wendelfleming/NagiosNotify
|
84544fb513bd5e6a29dfabbe9314e933daa2f5b5
|
[
"Apache-2.0"
] | null | null | null |
test_nagiosnotify_slack.py
|
wendelfleming/NagiosNotify
|
84544fb513bd5e6a29dfabbe9314e933daa2f5b5
|
[
"Apache-2.0"
] | 1
|
2020-04-01T06:34:01.000Z
|
2020-04-01T06:34:01.000Z
|
# Copyright 2019 University of Southern California Information Sciences Institute All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
import urllib
import json
import os
from nagiosnotify_slack import SlackNotify
class TestSlackNotify(unittest.TestCase):
# __web_hook_url = "http://put.a.real.value.here"
__web_hook_url = "https://hooks.slack.com/services/PUT URL HERE"
__primary = "#primary"
# __primary = "#alerts"
__secondary = "#secondary"
# __secondary = "#alerts-updates-dev"
__override = "#override"
# __override = "#alerts-criticals"
__nagios_server = ""
__list_servicedisplaynames = ["Yum Update", "APT Update"]
__list_state = ["WARNING"]
__service_message = "HOST: {0} Some service message"
__service_message_list = ["NAGIOS_HOSTNAME"]
__host_message = "HOST: {0} Some host message"
__host_message_list = ["NAGIOS_HOSTNAME"]
__slack_botname = "nagios"
__test = False
def __get_default_slacknotify(self):
    """Return a SlackNotify wired with this test class's canned settings."""
    notifier = SlackNotify()
    notifier._PRIMARY_CHANNEL = self.__primary
    notifier._SECONDARY_CHANNEL = self.__secondary
    notifier._NAGIOS_SERVER = self.__nagios_server
    notifier._LIST_SERVICEDISPLAYNAMES = self.__list_servicedisplaynames
    notifier._LIST_STATE = self.__list_state
    notifier._SERVICE_MESSAGE = self.__service_message
    notifier._SERVICE_MESSAGE_LIST = self.__service_message_list
    notifier._HOST_MESSAGE = self.__host_message
    notifier._HOST_MESSAGE_LIST = self.__host_message_list
    notifier._WEB_HOOK_URL = self.__web_hook_url
    notifier._SLACK_BOTNAME = self.__slack_botname
    notifier._TEST = self.__test
    return notifier
def setUp(self):
    # Build a fresh, fully configured notifier before every test so that
    # per-test mutations cannot leak between cases.
    self.__mock_slack_notify = self.__get_default_slacknotify()
def test_get_args(self):
    """With no CLI flags, _get_args leaves every configured default intact."""
    fake_argv = ["nagiosnotify_slack.py"]
    patcher = mock.patch('sys.argv', fake_argv)
    patcher.start()
    args = self.__mock_slack_notify._get_args()
    patcher.stop()

    self.assertIsNone(args.channel)
    self.assertEqual(self.__primary, self.__mock_slack_notify._PRIMARY_CHANNEL)
    self.assertEqual(self.__secondary, self.__mock_slack_notify._SECONDARY_CHANNEL)
    self.assertEqual(self.__nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
    self.assertListEqual(self.__list_servicedisplaynames, self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
    self.assertListEqual(self.__list_state, self.__mock_slack_notify._LIST_STATE)
    self.assertEqual(self.__service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
    self.assertListEqual(self.__service_message_list, self.__mock_slack_notify._SERVICE_MESSAGE_LIST)
    self.assertEqual(self.__host_message, self.__mock_slack_notify._HOST_MESSAGE)
    self.assertListEqual(self.__host_message_list, self.__mock_slack_notify._HOST_MESSAGE_LIST)
    self.assertEqual(self.__web_hook_url, self.__mock_slack_notify._WEB_HOOK_URL)
    self.assertEqual(self.__slack_botname, self.__mock_slack_notify._SLACK_BOTNAME)
    self.assertEqual(self.__test, self.__mock_slack_notify._TEST)
def test_get_args_override(self):
    """-c overrides the notification channel without touching other config."""
    override_channel = "#override"
    # Context manager ensures the argv patch is undone even on failure.
    with mock.patch('sys.argv', ["nagiosnotify_slack.py", "-c", override_channel]):
        args = self.__mock_slack_notify._get_args()
    self.assertEqual(override_channel, args.channel)
    self.assertEqual(self.__primary, self.__mock_slack_notify._PRIMARY_CHANNEL)
    self.assertEqual(self.__secondary, self.__mock_slack_notify._SECONDARY_CHANNEL)
    self.assertEqual(self.__nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
    self.assertListEqual(self.__list_servicedisplaynames, self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
    self.assertListEqual(self.__list_state, self.__mock_slack_notify._LIST_STATE)
    self.assertEqual(self.__service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
    self.assertListEqual(self.__service_message_list, self.__mock_slack_notify._SERVICE_MESSAGE_LIST)
    self.assertEqual(self.__host_message, self.__mock_slack_notify._HOST_MESSAGE)
    self.assertListEqual(self.__host_message_list, self.__mock_slack_notify._HOST_MESSAGE_LIST)
    self.assertEqual(self.__web_hook_url, self.__mock_slack_notify._WEB_HOOK_URL)
    self.assertEqual(self.__slack_botname, self.__mock_slack_notify._SLACK_BOTNAME)
    self.assertEqual(self.__test, self.__mock_slack_notify._TEST)
def test_get_args_testmode(self):
    """-t flips the notifier's _TEST flag on; everything else is preserved."""
    # Context manager ensures the argv patch is undone even on failure.
    with mock.patch('sys.argv', ["nagiosnotify_slack.py", "-t"]):
        args = self.__mock_slack_notify._get_args()
    self.assertIsNone(args.channel)
    self.assertTrue(args.test)
    self.assertEqual(self.__primary, self.__mock_slack_notify._PRIMARY_CHANNEL)
    self.assertEqual(self.__secondary, self.__mock_slack_notify._SECONDARY_CHANNEL)
    self.assertEqual(self.__nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
    self.assertListEqual(self.__list_servicedisplaynames, self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
    self.assertListEqual(self.__list_state, self.__mock_slack_notify._LIST_STATE)
    self.assertEqual(self.__service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
    self.assertListEqual(self.__service_message_list, self.__mock_slack_notify._SERVICE_MESSAGE_LIST)
    self.assertEqual(self.__host_message, self.__mock_slack_notify._HOST_MESSAGE)
    self.assertListEqual(self.__host_message_list, self.__mock_slack_notify._HOST_MESSAGE_LIST)
    self.assertEqual(self.__web_hook_url, self.__mock_slack_notify._WEB_HOOK_URL)
    self.assertEqual(self.__slack_botname, self.__mock_slack_notify._SLACK_BOTNAME)
    # _TEST defaulted False, so it must now differ and be truthy.
    self.assertNotEqual(self.__test, self.__mock_slack_notify._TEST)
    self.assertTrue(self.__mock_slack_notify._TEST)
def test_get_args_clear(self):
    """-clr empties the service-name and state filter lists only."""
    # Context manager ensures the argv patch is undone even on failure.
    with mock.patch('sys.argv', ["nagiosnotify_slack.py", "-clr"]):
        args = self.__mock_slack_notify._get_args()
    self.assertIsNone(args.channel)
    self.assertTrue(args.clear)
    self.assertEqual(self.__primary, self.__mock_slack_notify._PRIMARY_CHANNEL)
    self.assertEqual(self.__secondary, self.__mock_slack_notify._SECONDARY_CHANNEL)
    self.assertEqual(self.__nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
    self.assertEqual(self.__service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
    self.assertListEqual(self.__service_message_list, self.__mock_slack_notify._SERVICE_MESSAGE_LIST)
    self.assertEqual(self.__host_message, self.__mock_slack_notify._HOST_MESSAGE)
    self.assertListEqual(self.__host_message_list, self.__mock_slack_notify._HOST_MESSAGE_LIST)
    self.assertEqual(self.__web_hook_url, self.__mock_slack_notify._WEB_HOOK_URL)
    self.assertEqual(self.__slack_botname, self.__mock_slack_notify._SLACK_BOTNAME)
    self.assertEqual(self.__test, self.__mock_slack_notify._TEST)
    # Both filter lists must have been cleared.
    self.assertListEqual([], self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
    self.assertListEqual([], self.__mock_slack_notify._LIST_STATE)
def test_get_args_skiplink(self):
    """-sk disables the "See Nagios" link; other configuration is untouched."""
    # Context manager ensures the argv patch is undone even on failure.
    with mock.patch('sys.argv', ["nagiosnotify_slack.py", "-sk"]):
        args = self.__mock_slack_notify._get_args()
    self.assertIsNone(args.channel)
    # NOTE(review): -sk yields skiplink == False — presumably a
    # store_false argparse action; confirm against nagiosnotify_slack.py.
    self.assertFalse(args.skiplink)
    self.assertEqual(self.__primary, self.__mock_slack_notify._PRIMARY_CHANNEL)
    self.assertEqual(self.__secondary, self.__mock_slack_notify._SECONDARY_CHANNEL)
    self.assertEqual(self.__nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
    self.assertListEqual(self.__list_servicedisplaynames, self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
    self.assertListEqual(self.__list_state, self.__mock_slack_notify._LIST_STATE)
    self.assertEqual(self.__service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
    self.assertListEqual(self.__service_message_list, self.__mock_slack_notify._SERVICE_MESSAGE_LIST)
    self.assertEqual(self.__host_message, self.__mock_slack_notify._HOST_MESSAGE)
    self.assertListEqual(self.__host_message_list, self.__mock_slack_notify._HOST_MESSAGE_LIST)
    self.assertEqual(self.__web_hook_url, self.__mock_slack_notify._WEB_HOOK_URL)
    self.assertEqual(self.__slack_botname, self.__mock_slack_notify._SLACK_BOTNAME)
    self.assertEqual(self.__test, self.__mock_slack_notify._TEST)
def test_get_args_primary_channel(self):
    """-pc replaces the primary channel and leaves all other config alone."""
    primary_channel = "#new_primary"
    # Context manager ensures the argv patch is undone even on failure.
    with mock.patch('sys.argv', ["nagiosnotify_slack.py", "-pc", primary_channel]):
        args = self.__mock_slack_notify._get_args()
    self.assertEqual(primary_channel, args.primary)
    self.assertEqual(self.__secondary, self.__mock_slack_notify._SECONDARY_CHANNEL)
    self.assertEqual(self.__nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
    self.assertListEqual(self.__list_servicedisplaynames, self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
    self.assertListEqual(self.__list_state, self.__mock_slack_notify._LIST_STATE)
    self.assertEqual(self.__service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
    self.assertListEqual(self.__service_message_list, self.__mock_slack_notify._SERVICE_MESSAGE_LIST)
    self.assertEqual(self.__host_message, self.__mock_slack_notify._HOST_MESSAGE)
    self.assertListEqual(self.__host_message_list, self.__mock_slack_notify._HOST_MESSAGE_LIST)
    self.assertEqual(self.__web_hook_url, self.__mock_slack_notify._WEB_HOOK_URL)
    self.assertEqual(self.__slack_botname, self.__mock_slack_notify._SLACK_BOTNAME)
    self.assertEqual(self.__test, self.__mock_slack_notify._TEST)
    self.assertNotEqual(self.__primary, self.__mock_slack_notify._PRIMARY_CHANNEL)
    self.assertEqual(primary_channel, self.__mock_slack_notify._PRIMARY_CHANNEL)
def test_get_args_secondary_channel(self):
    """-sc replaces the secondary channel and leaves all other config alone."""
    secondary_channel = "#new_secondary"
    # Context manager ensures the argv patch is undone even on failure.
    with mock.patch('sys.argv', ["nagiosnotify_slack.py", "-sc", secondary_channel]):
        args = self.__mock_slack_notify._get_args()
    self.assertEqual(secondary_channel, args.secondary)
    self.assertEqual(self.__primary, self.__mock_slack_notify._PRIMARY_CHANNEL)
    self.assertEqual(self.__nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
    self.assertListEqual(self.__list_servicedisplaynames, self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
    self.assertListEqual(self.__list_state, self.__mock_slack_notify._LIST_STATE)
    self.assertEqual(self.__service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
    self.assertListEqual(self.__service_message_list, self.__mock_slack_notify._SERVICE_MESSAGE_LIST)
    self.assertEqual(self.__host_message, self.__mock_slack_notify._HOST_MESSAGE)
    self.assertListEqual(self.__host_message_list, self.__mock_slack_notify._HOST_MESSAGE_LIST)
    self.assertEqual(self.__web_hook_url, self.__mock_slack_notify._WEB_HOOK_URL)
    self.assertEqual(self.__slack_botname, self.__mock_slack_notify._SLACK_BOTNAME)
    self.assertEqual(self.__test, self.__mock_slack_notify._TEST)
    self.assertNotEqual(self.__secondary, self.__mock_slack_notify._SECONDARY_CHANNEL)
    self.assertEqual(secondary_channel, self.__mock_slack_notify._SECONDARY_CHANNEL)
def test_get_args_nagios_server(self):
    """-n replaces the Nagios server hostname, leaving other config alone."""
    nagios_server = "nagios2.somedomain.com"
    # Context manager ensures the argv patch is undone even on failure.
    with mock.patch('sys.argv', ["nagiosnotify_slack.py", "-n", nagios_server]):
        args = self.__mock_slack_notify._get_args()
    self.assertEqual(nagios_server, args.nagios)
    self.assertEqual(self.__primary, self.__mock_slack_notify._PRIMARY_CHANNEL)
    self.assertEqual(self.__secondary, self.__mock_slack_notify._SECONDARY_CHANNEL)
    self.assertListEqual(self.__list_servicedisplaynames, self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
    self.assertListEqual(self.__list_state, self.__mock_slack_notify._LIST_STATE)
    self.assertEqual(self.__service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
    self.assertListEqual(self.__service_message_list, self.__mock_slack_notify._SERVICE_MESSAGE_LIST)
    self.assertEqual(self.__host_message, self.__mock_slack_notify._HOST_MESSAGE)
    self.assertListEqual(self.__host_message_list, self.__mock_slack_notify._HOST_MESSAGE_LIST)
    self.assertEqual(self.__web_hook_url, self.__mock_slack_notify._WEB_HOOK_URL)
    self.assertEqual(self.__slack_botname, self.__mock_slack_notify._SLACK_BOTNAME)
    self.assertEqual(self.__test, self.__mock_slack_notify._TEST)
    self.assertNotEqual(self.__nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
    self.assertEqual(nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
def test_get_args_service_list(self):
    """-svc replaces the service display-name filter list wholesale."""
    svc_list = ["Ping", "Load"]
    # Context manager ensures the argv patch is undone even on failure.
    with mock.patch('sys.argv', ["nagiosnotify_slack.py", "-svc", svc_list[0], svc_list[1]]):
        args = self.__mock_slack_notify._get_args()
    self.assertIsNone(args.channel)
    self.assertEqual(self.__primary, self.__mock_slack_notify._PRIMARY_CHANNEL)
    self.assertEqual(self.__secondary, self.__mock_slack_notify._SECONDARY_CHANNEL)
    self.assertEqual(self.__nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
    self.assertListEqual(self.__list_state, self.__mock_slack_notify._LIST_STATE)
    self.assertEqual(self.__service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
    self.assertListEqual(self.__service_message_list, self.__mock_slack_notify._SERVICE_MESSAGE_LIST)
    self.assertEqual(self.__host_message, self.__mock_slack_notify._HOST_MESSAGE)
    self.assertListEqual(self.__host_message_list, self.__mock_slack_notify._HOST_MESSAGE_LIST)
    self.assertEqual(self.__web_hook_url, self.__mock_slack_notify._WEB_HOOK_URL)
    self.assertEqual(self.__slack_botname, self.__mock_slack_notify._SLACK_BOTNAME)
    self.assertEqual(self.__test, self.__mock_slack_notify._TEST)
    # The old entries must be replaced, not merged.
    for old_val in self.__list_servicedisplaynames:
        self.assertNotIn(old_val, self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
    self.assertListEqual(svc_list, self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
def test_get_args_state_list(self):
    """-st replaces the state filter list wholesale."""
    state_list = ["CRITICAL", "DOWN"]
    # Context manager ensures the argv patch is undone even on failure.
    with mock.patch('sys.argv', ["nagiosnotify_slack.py", "-st", state_list[0], state_list[1]]):
        args = self.__mock_slack_notify._get_args()
    self.assertIsNone(args.channel)
    self.assertEqual(self.__primary, self.__mock_slack_notify._PRIMARY_CHANNEL)
    self.assertEqual(self.__secondary, self.__mock_slack_notify._SECONDARY_CHANNEL)
    self.assertEqual(self.__nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
    self.assertListEqual(self.__list_servicedisplaynames, self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
    self.assertEqual(self.__service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
    self.assertListEqual(self.__service_message_list, self.__mock_slack_notify._SERVICE_MESSAGE_LIST)
    self.assertEqual(self.__host_message, self.__mock_slack_notify._HOST_MESSAGE)
    self.assertListEqual(self.__host_message_list, self.__mock_slack_notify._HOST_MESSAGE_LIST)
    self.assertEqual(self.__web_hook_url, self.__mock_slack_notify._WEB_HOOK_URL)
    self.assertEqual(self.__slack_botname, self.__mock_slack_notify._SLACK_BOTNAME)
    self.assertEqual(self.__test, self.__mock_slack_notify._TEST)
    # The old entries must be replaced, not merged.
    for old_val in self.__list_state:
        self.assertNotIn(old_val, self.__mock_slack_notify._LIST_STATE)
    self.assertListEqual(state_list, self.__mock_slack_notify._LIST_STATE)
def test_get_args_host_message(self):
    """-hm replaces the host message template, leaving other config alone."""
    host_message = "HOST: {0} STATUS: {1}"
    # Context manager ensures the argv patch is undone even on failure.
    with mock.patch('sys.argv', ["nagiosnotify_slack.py", "-hm", host_message]):
        args = self.__mock_slack_notify._get_args()
    self.assertIsNone(args.channel)
    self.assertEqual(self.__primary, self.__mock_slack_notify._PRIMARY_CHANNEL)
    self.assertEqual(self.__secondary, self.__mock_slack_notify._SECONDARY_CHANNEL)
    self.assertEqual(self.__nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
    self.assertListEqual(self.__list_servicedisplaynames, self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
    self.assertListEqual(self.__list_state, self.__mock_slack_notify._LIST_STATE)
    self.assertEqual(self.__service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
    self.assertListEqual(self.__service_message_list, self.__mock_slack_notify._SERVICE_MESSAGE_LIST)
    self.assertListEqual(self.__host_message_list, self.__mock_slack_notify._HOST_MESSAGE_LIST)
    self.assertEqual(self.__web_hook_url, self.__mock_slack_notify._WEB_HOOK_URL)
    self.assertEqual(self.__slack_botname, self.__mock_slack_notify._SLACK_BOTNAME)
    self.assertEqual(self.__test, self.__mock_slack_notify._TEST)
    self.assertNotEqual(self.__host_message, self.__mock_slack_notify._HOST_MESSAGE)
    self.assertEqual(host_message, self.__mock_slack_notify._HOST_MESSAGE)
def test_get_args_host_message_list(self):
    """-hl replaces the env-var list used to fill the host message template."""
    host_message_list = ["NAGIOS_HOSTNAME", "NAGIOS_HOSTSTATUS"]
    # Context manager ensures the argv patch is undone even on failure.
    with mock.patch('sys.argv',
                    ["nagiosnotify_slack.py", "-hl", host_message_list[0], host_message_list[1]]):
        args = self.__mock_slack_notify._get_args()
    self.assertIsNone(args.channel)
    self.assertEqual(self.__primary, self.__mock_slack_notify._PRIMARY_CHANNEL)
    self.assertEqual(self.__secondary, self.__mock_slack_notify._SECONDARY_CHANNEL)
    self.assertEqual(self.__nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
    self.assertListEqual(self.__list_servicedisplaynames, self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
    self.assertEqual(self.__service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
    self.assertListEqual(self.__service_message_list, self.__mock_slack_notify._SERVICE_MESSAGE_LIST)
    self.assertListEqual(self.__list_state, self.__mock_slack_notify._LIST_STATE)
    self.assertEqual(self.__host_message, self.__mock_slack_notify._HOST_MESSAGE)
    self.assertEqual(self.__web_hook_url, self.__mock_slack_notify._WEB_HOOK_URL)
    self.assertEqual(self.__slack_botname, self.__mock_slack_notify._SLACK_BOTNAME)
    self.assertEqual(self.__test, self.__mock_slack_notify._TEST)
    self.assertListEqual(host_message_list, self.__mock_slack_notify._HOST_MESSAGE_LIST)
def test_get_args_service_message(self):
    """-sm replaces the service message template, leaving other config alone."""
    service_message = "HOST: {0} SERVICE: {1}"
    # Context manager ensures the argv patch is undone even on failure.
    with mock.patch('sys.argv', ["nagiosnotify_slack.py", "-sm", service_message]):
        args = self.__mock_slack_notify._get_args()
    self.assertIsNone(args.channel)
    self.assertEqual(self.__primary, self.__mock_slack_notify._PRIMARY_CHANNEL)
    self.assertEqual(self.__secondary, self.__mock_slack_notify._SECONDARY_CHANNEL)
    self.assertEqual(self.__nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
    self.assertListEqual(self.__list_servicedisplaynames, self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
    self.assertListEqual(self.__list_state, self.__mock_slack_notify._LIST_STATE)
    self.assertEqual(self.__host_message, self.__mock_slack_notify._HOST_MESSAGE)
    self.assertListEqual(self.__service_message_list, self.__mock_slack_notify._SERVICE_MESSAGE_LIST)
    self.assertListEqual(self.__host_message_list, self.__mock_slack_notify._HOST_MESSAGE_LIST)
    self.assertEqual(self.__web_hook_url, self.__mock_slack_notify._WEB_HOOK_URL)
    self.assertEqual(self.__slack_botname, self.__mock_slack_notify._SLACK_BOTNAME)
    self.assertEqual(self.__test, self.__mock_slack_notify._TEST)
    self.assertNotEqual(self.__service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
    self.assertEqual(service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
def test_get_args_service_message_list(self):
    """-sl replaces the env-var list used to fill the service message template."""
    service_message_list = ["NAGIOS_HOSTNAME", "NAGIOS_SERVICESTATE"]
    # Context manager ensures the argv patch is undone even on failure.
    with mock.patch('sys.argv',
                    ["nagiosnotify_slack.py", "-sl", service_message_list[0], service_message_list[1]]):
        args = self.__mock_slack_notify._get_args()
    self.assertIsNone(args.channel)
    self.assertEqual(self.__primary, self.__mock_slack_notify._PRIMARY_CHANNEL)
    self.assertEqual(self.__secondary, self.__mock_slack_notify._SECONDARY_CHANNEL)
    self.assertEqual(self.__nagios_server, self.__mock_slack_notify._NAGIOS_SERVER)
    self.assertListEqual(self.__list_servicedisplaynames, self.__mock_slack_notify._LIST_SERVICEDISPLAYNAMES)
    self.assertListEqual(self.__list_state, self.__mock_slack_notify._LIST_STATE)
    self.assertEqual(self.__host_message, self.__mock_slack_notify._HOST_MESSAGE)
    self.assertListEqual(self.__host_message_list, self.__mock_slack_notify._HOST_MESSAGE_LIST)
    self.assertEqual(self.__service_message, self.__mock_slack_notify._SERVICE_MESSAGE)
    self.assertEqual(self.__web_hook_url, self.__mock_slack_notify._WEB_HOOK_URL)
    self.assertEqual(self.__slack_botname, self.__mock_slack_notify._SLACK_BOTNAME)
    self.assertEqual(self.__test, self.__mock_slack_notify._TEST)
    self.assertListEqual(service_message_list, self.__mock_slack_notify._SERVICE_MESSAGE_LIST)
def test_get_link(self):
    """_get_link produces a Slack-style " <url|See Nagios>" link for a host."""
    host_name = "host01"
    expected_link = (
        " <https://" + self.__nagios_server
        + "/nagiosxi/includes/components/xicore/status.php?host="
        + host_name + "|See Nagios>"
    )
    self.assertEqual(expected_link, self.__mock_slack_notify._get_link(host_name))
def test_get_icon(self):
    """_get_icon maps each Nagios service/host state to a Slack emoji prefix."""
    # (state, expected emoji) pairs: service states first, then host states,
    # in the same order the original assertions exercised them.
    expected_icons = [
        ("CRITICAL", ":x: "),
        ("WARNING", ":warning: "),
        ("OK", ":white_check_mark: "),
        ("DOWN", ":x: "),
        ("UP", ":white_check_mark: "),
        ("UNKNOWN", ":question: "),
        ("SOMEUNKNOWNSTATE", ":white_medium_square: "),
    ]
    for state, icon in expected_icons:
        self.assertEqual(icon, self.__mock_slack_notify._get_icon(state))
def test_get_request_data(self):
    """_get_request_data builds the webhook URL and JSON payload from env vars."""
    nagios_hostname = "host1"
    mock_env = {"NAGIOS_HOSTNAME": nagios_hostname,
                "NAGIOS_HOSTSTATE": "DOWN",
                "NAGIOS_HOSTOUTPUT": "Host message testing"}
    # Nested context managers guarantee both the argv and environ patches
    # are reverted even if _get_request_data raises; the original
    # start()/stop() pairs leaked the patches on failure.
    with mock.patch('sys.argv', ["nagiosnotify_slack.py"]):
        with mock.patch.dict(os.environ, mock_env):
            results = self.__mock_slack_notify._get_request_data()
    self.assertEqual(self.__web_hook_url, results['url'])
    text_str_builder = [":x: HOST: ", nagios_hostname, " Some host message <https://",
                        self.__nagios_server, "/nagiosxi/includes/components/xicore/status.php?host=",
                        nagios_hostname, "|See Nagios>"]
    expected_data = {
        "username": self.__slack_botname,
        "channel": self.__primary,
        "text": ''.join(text_str_builder)
    }
    actual_data = json.loads(results['data']['payload'])
    self.assertDictEqual(expected_data, actual_data)
def test_get_request_data_override(self):
    """With -c and -sk, the payload targets the override channel with no link."""
    nagios_hostname = "host1"
    override_channel = self.__override
    mock_env = {"NAGIOS_HOSTNAME": nagios_hostname,
                "NAGIOS_HOSTSTATE": "DOWN",
                "NAGIOS_HOSTOUTPUT": "Host message testing"}
    # Nested context managers guarantee both patches are reverted even if
    # _get_request_data raises; start()/stop() leaked them on failure.
    with mock.patch('sys.argv', ["nagiosnotify_slack.py", "-c", override_channel, "-sk"]):
        with mock.patch.dict(os.environ, mock_env):
            results = self.__mock_slack_notify._get_request_data()
    # -sk suppresses the trailing "See Nagios" link.
    text_str_builder = [":x: HOST: ", nagios_hostname, " Some host message"]
    expected_data = {
        "username": self.__slack_botname,
        "channel": override_channel,
        "text": ''.join(text_str_builder)
    }
    actual_data = json.loads(results['data']['payload'])
    self.assertDictEqual(expected_data, actual_data)
if __name__ == '__main__':
    # Allow running this test module directly with the unittest CLI runner.
    unittest.main()
| 53.671343
| 116
| 0.745351
| 3,294
| 26,782
| 5.404675
| 0.06527
| 0.132843
| 0.146773
| 0.214514
| 0.872606
| 0.838847
| 0.811998
| 0.788631
| 0.777172
| 0.766725
| 0
| 0.001515
| 0.162161
| 26,782
| 498
| 117
| 53.779116
| 0.79188
| 0.028265
| 0
| 0.653266
| 0
| 0
| 0.056455
| 0.018652
| 0
| 0
| 0
| 0
| 0.512563
| 1
| 0.050251
| false
| 0
| 0.015075
| 0
| 0.103015
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
05e6b0730694e080e2aabbe9afee0ec87aa2bfda
| 15,121
|
py
|
Python
|
tests/test_move_legality.py
|
atw1020/sente
|
ebc6150124ad0bd82b0f5c414cbedb242e3c3a37
|
[
"MIT"
] | 3
|
2021-07-30T02:17:52.000Z
|
2021-12-11T15:57:04.000Z
|
tests/test_move_legality.py
|
atw1020/sente
|
ebc6150124ad0bd82b0f5c414cbedb242e3c3a37
|
[
"MIT"
] | null | null | null |
tests/test_move_legality.py
|
atw1020/sente
|
ebc6150124ad0bd82b0f5c414cbedb242e3c3a37
|
[
"MIT"
] | null | null | null |
"""
Author: Arthur Wesley
"""
from unittest import TestCase
import sente
from assert_does_not_raise import DoesNotRaiseTestCase
class TestMove(DoesNotRaiseTestCase):
    """Playing stones: placement, captures, and the three ways to pass."""

    @staticmethod
    def __play_sequence(game, moves):
        # Play each (x, y, stone) triple on *game* in order.
        for x, y, colour in moves:
            game.play(x, y, colour)

    def test_make_move(self):
        """
        tests to see if we can make a simple move
        :return:
        """
        game = sente.Game()
        game.play(3, 3, sente.stone.BLACK)
        self.assertEqual(sente.stone.BLACK, game.get_point(3, 3))

    def test_capture_stone(self):
        """
        tests to see if the board correctly removes stones
        :return:
        """
        game = sente.Game()
        self.__play_sequence(game, [
            (2, 3, sente.stone.BLACK), (3, 3, sente.stone.WHITE),
            (4, 3, sente.stone.BLACK), (18, 18, sente.stone.WHITE),
            (3, 2, sente.stone.BLACK), (18, 17, sente.stone.WHITE),
            (3, 4, sente.stone.BLACK),
        ])
        self.assertEqual(sente.stone.EMPTY, game.get_point(3, 3))

    def test_capture_multiple_stones(self):
        """
        checks to see if a group of multiple stones can be captured
        :return:
        """
        game = sente.Game()
        self.__play_sequence(game, [
            (2, 3, sente.stone.BLACK), (3, 3, sente.stone.WHITE),
            (4, 3, sente.stone.BLACK), (3, 2, sente.stone.WHITE),
            (2, 2, sente.stone.BLACK), (18, 18, sente.stone.WHITE),
            (4, 2, sente.stone.BLACK), (18, 17, sente.stone.WHITE),
            (3, 1, sente.stone.BLACK), (18, 16, sente.stone.WHITE),
            (3, 4, sente.stone.BLACK),
        ])
        self.assertEqual(sente.stone.EMPTY, game.get_point(3, 3))
        self.assertEqual(sente.stone.EMPTY, game.get_point(3, 2))

    def test_capture_multiple_groups(self):
        """
        tests to see if multiple stones can be captured with one move
        :return:
        """
        game = sente.Game()
        self.__play_sequence(game, [
            (1, 3, sente.stone.BLACK), (1, 2, sente.stone.WHITE),
            (2, 2, sente.stone.BLACK), (2, 1, sente.stone.WHITE),
            (3, 1, sente.stone.BLACK), (19, 19, sente.stone.WHITE),
            (1, 1, sente.stone.BLACK),
        ])
        self.assertEqual(sente.stone.EMPTY, game.get_point(1, 2))
        self.assertEqual(sente.stone.EMPTY, game.get_point(2, 1))

    def test_capture_edge(self):
        """
        checks to see if stones on the edge of the board can be captured
        :return:
        """
        game = sente.Game()
        self.__play_sequence(game, [
            (1, 1, sente.stone.BLACK), (2, 2, sente.stone.WHITE),
            (1, 2, sente.stone.BLACK), (1, 3, sente.stone.WHITE),
            (2, 1, sente.stone.BLACK), (3, 1, sente.stone.WHITE),
        ])
        self.assertEqual(sente.stone.EMPTY, game.get_point(1, 1))
        self.assertEqual(sente.stone.EMPTY, game.get_point(2, 1))
        self.assertEqual(sente.stone.EMPTY, game.get_point(1, 2))

    def test_pass_none(self):
        """
        verifies that you can pass a move by passing none
        :return:
        """
        game = sente.Game()
        with self.assertDoesNotRaise(sente.exceptions.IllegalMoveException):
            for _ in range(2):
                game.play(None)
        self.assertTrue(game.is_over())

    def test_pss_method(self):
        """
        makes sure that the pass method works
        :return: None
        """
        game = sente.Game()
        with self.assertDoesNotRaise(sente.exceptions.IllegalMoveException):
            for _ in range(2):
                game.pss()
        self.assertTrue(game.is_over())

    def test_pass_move(self):
        """
        checks to see if passing a pass move causes a pass
        :return:
        """
        game = sente.Game()
        with self.assertDoesNotRaise(sente.exceptions.IllegalMoveException):
            for colour in (sente.stone.BLACK, sente.stone.WHITE):
                game.play(sente.moves.Pass(colour))
        self.assertTrue(game.is_over())

    def test_self_atari_legal(self):
        """
        makes sure that self atari is a legal move
        :return:
        """
        game = sente.Game()
        game.play(1, 2)
        self.assertTrue(game.is_legal(1, 1))
        with self.assertDoesNotRaise(sente.exceptions.IllegalMoveException):
            game.play(1, 1)
class TestLegalMove(TestCase):
    """Game.is_legal: colours, bounds, occupancy, self-capture, and ko."""

    @staticmethod
    def __play_all(game, moves):
        # Play each (x, y, stone) triple on *game* in order.
        for x, y, colour in moves:
            game.play(x, y, colour)

    def test_empty_correct_color(self):
        """
        tests to if the correct color is detected (assuming MakeMove does not work
        :return:
        """
        # create a 19x19 board
        game = sente.Game()
        self.assertTrue(game.is_legal(3, 3, sente.stone.BLACK))
        self.assertFalse(game.is_legal(3, 3, sente.stone.WHITE))
        self.assertTrue(game.is_legal(sente.Move(15, 15, sente.stone.BLACK)))
        self.assertFalse(game.is_legal(sente.Move(15, 15, sente.stone.WHITE)))

    def test_correct_color(self):
        """
        tests to see if the color is detected
        :return:
        """
        game = sente.Game()
        game.play(3, 3, sente.stone.BLACK)
        self.assertTrue(game.is_legal(15, 3, sente.stone.WHITE))
        self.assertFalse(game.is_legal(15, 3, sente.stone.BLACK))
        self.assertTrue(game.is_legal(sente.Move(15, 3, sente.stone.WHITE)))
        self.assertFalse(game.is_legal(sente.Move(15, 3, sente.stone.BLACK)))

    def test_empty_out_of_bounds(self):
        """
        checks to see if out of bounds coords are illegal
        :return:
        """
        game = sente.Game()
        self.assertTrue(game.is_legal(19, 19, sente.stone.BLACK))
        self.assertFalse(game.is_legal(20, 19, sente.stone.BLACK))
        self.assertFalse(game.is_legal(19, 20, sente.stone.BLACK))
        # internal indexing
        self.assertTrue(game.is_legal(sente.Move(18, 18, sente.stone.BLACK)))
        self.assertFalse(game.is_legal(sente.Move(19, 18, sente.stone.BLACK)))
        self.assertFalse(game.is_legal(sente.Move(18, 19, sente.stone.BLACK)))

    def test_occupied_space(self):
        """
        checks to see if playing on an occupied space is illegal
        :return:
        """
        game = sente.Game()
        self.__play_all(game, [(2, 3, sente.stone.BLACK),
                               (15, 3, sente.stone.WHITE)])
        self.assertFalse(game.is_legal(2, 3, sente.stone.BLACK))
        self.assertFalse(game.is_legal(15, 3, sente.stone.BLACK))

    def test_self_capture(self):
        """
        checks to see if a self-capture move is illegal
        :return:
        """
        game = sente.Game()
        self.__play_all(game, [(1, 2, sente.stone.BLACK),
                               (19, 19, sente.stone.WHITE),
                               (2, 1, sente.stone.BLACK)])
        self.assertFalse(game.is_legal(1, 1, sente.stone.WHITE))

    def test_group_self_capture(self):
        """
        checks to see self capture moves are illegal for groups of stones
        :return:
        """
        game = sente.Game()
        self.__play_all(game, [
            (1, 3, sente.stone.BLACK), (1, 2, sente.stone.WHITE),
            (2, 3, sente.stone.BLACK), (2, 2, sente.stone.WHITE),
            (3, 2, sente.stone.BLACK), (2, 1, sente.stone.WHITE),
            (3, 1, sente.stone.BLACK),
        ])
        self.assertFalse(game.is_legal(1, 1))

    def test_empty_triangle_liberties(self):
        """
        checks to see if a group's overlapping liberties matter
        :return:
        """
        game = sente.Game()
        # Alternating colours are implied; no stone argument is passed.
        for x, y in [(1, 3), (1, 2), (2, 3), (2, 2),
                     (3, 2), (2, 1), (3, 1), (19, 19)]:
            game.play(x, y)
        self.assertTrue(game.is_legal(1, 1))

    def test_ko(self):
        """
        checks to see if the game correctly recognizes a Ko move as illegal
        :return:
        """
        game = sente.Game()
        self.__play_all(game, [
            (2, 3, sente.stone.BLACK), (3, 3, sente.stone.WHITE),
            (4, 3, sente.stone.BLACK), (1, 3, sente.stone.WHITE),
            (3, 2, sente.stone.BLACK), (2, 4, sente.stone.WHITE),
            (3, 4, sente.stone.BLACK), (2, 2, sente.stone.WHITE),
        ])
        # play away before taking the ko
        game.play(18, 18, sente.stone.BLACK)
        game.play(3, 3, sente.stone.WHITE)  # take the Ko
        self.assertFalse(game.is_legal(2, 3, sente.stone.BLACK))

    def test_inactive_ko(self):
        """
        checks to see if a ko gos inactive after making a ko threat
        :return:
        """
        game = sente.Game()
        self.__play_all(game, [
            (3, 4, sente.stone.BLACK), (4, 4, sente.stone.WHITE),
            (5, 4, sente.stone.BLACK), (2, 4, sente.stone.WHITE),
            (4, 3, sente.stone.BLACK), (3, 5, sente.stone.WHITE),
            (4, 5, sente.stone.BLACK), (3, 3, sente.stone.WHITE),
        ])
        # play away before taking the ko
        game.play(19, 19, sente.stone.BLACK)
        game.play(4, 4, sente.stone.WHITE)  # take the Ko
        # simulate a ko threat
        game.play(19, 1, sente.stone.BLACK)
        game.play(18, 1, sente.stone.WHITE)
        # the Ko should no longer be active
        self.assertTrue(game.is_legal(3, 4, sente.stone.BLACK))
        game.play(3, 4, sente.stone.BLACK)
        # it should now be illegal for white to play here
        self.assertFalse(game.is_legal(4, 4, sente.stone.WHITE))

    def test_zero_zero_illegal(self):
        """
        tests to see if playing on the zero zero point is illegal
        :return:
        """
        game = sente.Game()
        self.assertFalse(game.is_legal(0, 0))
class IllegalMoveThrowsException(DoesNotRaiseTestCase):
"""
makes sure that making illegal moves throws an exception
"""
def test_empty_correct_color(self):
"""
tests to if the correct color is detected (assuming MakeMove does not work
:return:
"""
# create a 19x19 board
game = sente.Game()
with self.assertRaises(sente.exceptions.IllegalMoveException):
game.play(3, 3, sente.stone.WHITE)
with self.assertRaises(sente.exceptions.IllegalMoveException):
game.play(15, 15, sente.stone.WHITE)
def test_correct_color(self):
"""
tests to see if the color is detected
:return:
"""
game = sente.Game()
game.play(3, 3, sente.stone.BLACK)
with self.assertRaises(sente.exceptions.IllegalMoveException):
game.play(15, 3, sente.stone.BLACK)
with self.assertRaises(sente.exceptions.IllegalMoveException):
game.play(sente.Move(15, 3, sente.stone.BLACK))
def test_empty_out_of_bounds(self):
"""
checks to see if out of bounds coords are illegal
:return:
"""
game = sente.Game()
with self.assertRaises(sente.exceptions.IllegalMoveException):
game.play(20, 19, sente.stone.BLACK)
game.play(19, 20, sente.stone.BLACK)
with self.assertRaises(sente.exceptions.IllegalMoveException):
game.play(sente.Move(20, 19, sente.stone.BLACK))
game.play(sente.Move(19, 20, sente.stone.BLACK))
def test_occupied_space(self):
"""
checks to see if playing on an occupied space is illegal
:return:
"""
game = sente.Game()
game.play(2, 3, sente.stone.BLACK)
game.play(15, 3, sente.stone.WHITE)
with self.assertRaises(sente.exceptions.IllegalMoveException):
game.play(2, 3, sente.stone.BLACK)
game.play(15, 3, sente.stone.BLACK)
def test_self_capture(self):
"""
checks to see if a self-capture move is illegal
:return:
"""
game = sente.Game()
game.play(1, 2, sente.stone.BLACK)
game.play(19, 19, sente.stone.WHITE)
game.play(2, 1, sente.stone.BLACK)
with self.assertRaises(sente.exceptions.IllegalMoveException):
game.play(1, 1, sente.stone.WHITE)
def test_group_self_capture(self):
"""
checks to see self capture moves are illegal for groups of stones
:return:
"""
game = sente.Game()
game.play(1, 3, sente.stone.BLACK)
game.play(1, 2, sente.stone.WHITE)
game.play(2, 3, sente.stone.BLACK)
game.play(2, 2, sente.stone.WHITE)
game.play(3, 2, sente.stone.BLACK)
game.play(2, 1, sente.stone.WHITE)
game.play(3, 1, sente.stone.BLACK)
with self.assertRaises(sente.exceptions.IllegalMoveException):
game.play(1, 1)
def test_ko(self):
"""
checks to see if the game correctly recognizes a Ko move as illegal
:return:
"""
game = sente.Game()
game.play(2, 3, sente.stone.BLACK)
game.play(3, 3, sente.stone.WHITE)
game.play(4, 3, sente.stone.BLACK)
game.play(1, 3, sente.stone.WHITE)
game.play(3, 2, sente.stone.BLACK)
game.play(2, 4, sente.stone.WHITE)
game.play(3, 4, sente.stone.BLACK)
game.play(2, 2, sente.stone.WHITE)
# play away before taking the ko
game.play(18, 18, sente.stone.BLACK)
game.play(3, 3, sente.stone.WHITE) # take the Ko
with self.assertRaises(sente.exceptions.IllegalMoveException):
game.play(2, 3, sente.stone.BLACK)
def test_inactive_ko(self):
    """
    verifies that a ko becomes inactive again after a ko threat is
    exchanged, and that the point then becomes illegal for the taker
    :return:
    """
    game = sente.Game()
    # build a ko shape away from the corner
    setup_moves = (
        (3, 4, sente.stone.BLACK),
        (4, 4, sente.stone.WHITE),
        (5, 4, sente.stone.BLACK),
        (2, 4, sente.stone.WHITE),
        (4, 3, sente.stone.BLACK),
        (3, 5, sente.stone.WHITE),
        (4, 5, sente.stone.BLACK),
        (3, 3, sente.stone.WHITE),
    )
    for x, y, colour in setup_moves:
        game.play(x, y, colour)
    # play away before taking the ko
    game.play(19, 19, sente.stone.BLACK)
    game.play(4, 4, sente.stone.WHITE)  # take the Ko
    # simulate a ko threat exchange
    game.play(19, 1, sente.stone.BLACK)
    game.play(18, 1, sente.stone.WHITE)
    # the Ko should no longer be active, so black may retake
    with self.assertDoesNotRaise(sente.exceptions.IllegalMoveException):
        game.play(3, 4, sente.stone.BLACK)
    # and now white is the one barred from recapturing immediately
    with self.assertRaises(sente.exceptions.IllegalMoveException):
        game.play(4, 4, sente.stone.WHITE)
def test_zero_zero_illegal(self):
    """
    verifies that the (0, 0) point is off the board and therefore an
    illegal move (board coordinates are 1-indexed)
    :return:
    """
    game = sente.Game()
    with self.assertRaises(sente.exceptions.IllegalMoveException):
        game.play(0, 0)
| 24.747954
| 82
| 0.587064
| 2,043
| 15,121
| 4.292707
| 0.069016
| 0.168757
| 0.141961
| 0.112657
| 0.909236
| 0.888255
| 0.878563
| 0.830445
| 0.788826
| 0.748803
| 0
| 0.037175
| 0.291978
| 15,121
| 610
| 83
| 24.788525
| 0.781991
| 0.156207
| 0
| 0.768
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.216
| 1
| 0.112
| false
| 0.016
| 0.012
| 0
| 0.136
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
05f030fca94f99a441a71345cbe5a0b587e88f0f
| 2,113
|
py
|
Python
|
blog/2020-01rgr/code/hf_estimators.py
|
daviddewhurst/daviddewhurst.github.io
|
d782bc7e8f9c821705fc83c7705006f40e29f360
|
[
"CC0-1.0"
] | null | null | null |
blog/2020-01rgr/code/hf_estimators.py
|
daviddewhurst/daviddewhurst.github.io
|
d782bc7e8f9c821705fc83c7705006f40e29f360
|
[
"CC0-1.0"
] | null | null | null |
blog/2020-01rgr/code/hf_estimators.py
|
daviddewhurst/daviddewhurst.github.io
|
d782bc7e8f9c821705fc83c7705006f40e29f360
|
[
"CC0-1.0"
] | null | null | null |
import numpy as np
def kaplan_meier_sf(marks, events=None, surviving=None):
    """Kaplan-Meier product-limit estimate of the survival function.

    :param marks: array-like of event times, one entry per observation
    :param events: event indicator per mark; defaults to all ones
        (every mark counts as an observed event)
    :param surviving: number at risk at each mark; defaults to
        n, n-1, ..., 1 for n marks (no censoring, distinct times)
    :return: ndarray of survival probabilities, one per mark
    """
    marks = np.array(marks)
    n = marks.shape[0]
    events = np.ones(n) if events is None else np.array(events)
    if surviving is None:
        # default at-risk counts: n, n-1, ..., 1
        surviving = n - np.linspace(0, n - 1, n)
    # running product of (1 - d_i / n_i)
    return np.cumprod(1. - events / surviving)
def kaplan_meier_cumhf(marks, events=None, surviving=None):
    """Cumulative hazard implied by the Kaplan-Meier survival estimate,
    H(t) = -log S(t). Parameters are forwarded to ``kaplan_meier_sf``.
    """
    survival = kaplan_meier_sf(marks, events=events, surviving=surviving)
    return -np.log(survival)
def kaplan_meier_hf(marks, events=None, surviving=None, returndim='same'):
    """Kaplan-Meier hazard-rate estimate: finite differences of the
    interpolated cumulative hazard.

    :param marks: array-like of event times; assumed sorted ascending
        (``np.interp`` requires increasing x) — TODO confirm callers sort
    :param events: event indicator per mark, forwarded to the estimator
    :param surviving: at-risk counts per mark, forwarded to the estimator
    :param returndim: only ``'same'`` is implemented (interpolate onto
        len(marks) + 1 evenly spaced points, differentiate)
    :return: tuple ``(x, hf)`` of the grid (without its first point) and
        the hazard-rate estimate on that grid
    :raises NotImplementedError: for any ``returndim`` other than 'same'
    """
    # bug fix: accept plain sequences (lists) like kaplan_meier_sf does;
    # the code below needs ndarray methods (.shape, .min, .max)
    marks = np.asarray(marks)
    cumhf = kaplan_meier_cumhf(marks, events=events, surviving=surviving)
    # interpolate the step-function cumulative hazard onto a uniform grid
    if returndim == 'same':
        dim = marks.shape[0] + 1
        x_interp = np.linspace(marks.min(), marks.max(), dim)
        cumhf_interp = np.interp(
            x_interp,
            marks,
            cumhf
        )
    else:
        raise NotImplementedError('Currently only returndim == same is implemented')
    # hazard rate = d(cumulative hazard)/dt, via forward differences
    dx = np.diff(x_interp)
    d_cumhf = np.diff(cumhf_interp)
    return x_interp[1:], d_cumhf / dx
def nelson_aalen_cumhf(marks, events=None, surviving=None):
    """Nelson-Aalen estimate of the cumulative hazard function.

    :param marks: array-like of event times, one entry per observation
    :param events: event indicator per mark; defaults to all ones
    :param surviving: number at risk at each mark; defaults to
        n, n-1, ..., 1 for n marks (no censoring, distinct times)
    :return: ndarray of cumulative hazard values, one per mark
    """
    marks = np.array(marks)
    n = marks.shape[0]
    events = np.ones(n) if events is None else np.array(events)
    if surviving is None:
        # default at-risk counts: n, n-1, ..., 1
        surviving = n - np.linspace(0, n - 1, n)
    # running sum of d_i / n_i
    return np.cumsum(events / surviving)
def nelson_aalen_hf(marks, events=None, surviving=None, returndim='same'):
    """Nelson-Aalen hazard-rate estimate: finite differences of the
    interpolated cumulative hazard.

    :param marks: array-like of event times; assumed sorted ascending
        (``np.interp`` requires increasing x) — TODO confirm callers sort
    :param events: event indicator per mark, forwarded to the estimator
    :param surviving: at-risk counts per mark, forwarded to the estimator
    :param returndim: only ``'same'`` is implemented (interpolate onto
        len(marks) + 1 evenly spaced points, differentiate)
    :return: tuple ``(x, hf)`` of the grid (without its first point) and
        the hazard-rate estimate on that grid
    :raises NotImplementedError: for any ``returndim`` other than 'same'
    """
    # bug fix: accept plain sequences (lists) like nelson_aalen_cumhf does;
    # the code below needs ndarray methods (.shape, .min, .max)
    marks = np.asarray(marks)
    cumhf = nelson_aalen_cumhf(marks, events=events, surviving=surviving)
    # interpolate the step-function cumulative hazard onto a uniform grid
    if returndim == 'same':
        dim = marks.shape[0] + 1
        x_interp = np.linspace(marks.min(), marks.max(), dim)
        cumhf_interp = np.interp(
            x_interp,
            marks,
            cumhf
        )
    else:
        raise NotImplementedError('Currently only returndim == same is implemented')
    # hazard rate = d(cumulative hazard)/dt, via forward differences
    dx = np.diff(x_interp)
    d_cumhf = np.diff(cumhf_interp)
    return x_interp[1:], d_cumhf / dx
| 31.073529
| 87
| 0.626124
| 280
| 2,113
| 4.610714
| 0.164286
| 0.077459
| 0.085205
| 0.092951
| 0.896204
| 0.814872
| 0.785438
| 0.785438
| 0.785438
| 0.711077
| 0
| 0.012715
| 0.255561
| 2,113
| 67
| 88
| 31.537313
| 0.80801
| 0.010885
| 0
| 0.703704
| 0
| 0
| 0.052733
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.092593
| false
| 0
| 0.018519
| 0
| 0.203704
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
af226cfce5a6d0af4f02d92d573b4c352f09cc96
| 7,256
|
py
|
Python
|
calculation.py
|
mw00847/OMP
|
b2cd0e4d212e359e11afb5940e25d20018f07053
|
[
"MIT"
] | null | null | null |
calculation.py
|
mw00847/OMP
|
b2cd0e4d212e359e11afb5940e25d20018f07053
|
[
"MIT"
] | null | null | null |
calculation.py
|
mw00847/OMP
|
b2cd0e4d212e359e11afb5940e25d20018f07053
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import numpy as np
import psi4
# density-fitted SCF with frozen core for every SAPT0 run below
psi4.set_options({'scf_type': 'df',
'freeze_core': 'true'})
#different combinations of the isomers
#O/M = comparison1
#O/P = comparison2
#M/P = comparison3
#comparison1 O/M
# dimer geometry: '0 1' lines give charge/multiplicity of each fragment,
# '--' separates the two monomers
comparison1 = psi4.geometry("""
0 1
O 1.72900 1.44890 1.30290
O 0.52150 -2.76860 0.50990
O 1.94280 1.60280 -0.95480
O 1.94610 -1.35340 -0.55510
C -0.07460 0.73300 -0.00740
C -0.28440 -0.64490 -0.05810
C -1.16380 1.60440 0.00750
C -1.58340 -1.15170 -0.09400
C -2.46290 1.09750 -0.02840
C -2.67270 -0.28050 -0.07920
C 1.27000 1.29310 0.03190
C 0.83250 -1.58060 -0.07500
H -1.01340 2.68020 0.04680
H -1.78090 -2.21920 -0.14740
H -3.31100 1.77580 -0.01770
H -3.68430 -0.67460 -0.11040
H 2.63430 1.82640 1.32040
H 1.27850 -3.39240 0.49470
--
0 1
O -3.54720 0.18420 0.00030
O 3.54710 0.18450 -0.00070
O -2.58510 -1.87520 -0.00050
O 2.58550 -1.87510 0.00060
C -1.20810 0.08130 0.00030
C 1.20790 0.08110 -0.00010
C -0.00020 -0.61630 0.00010
C -1.20790 1.47630 0.00020
C 1.20790 1.47610 -0.00020
C 0.00000 2.17360 0.00010
C -2.46790 -0.64530 -0.00040
C 2.46800 -0.64530 0.00040
H -0.00020 -1.70390 0.00010
H -2.12470 2.05900 0.00020
H 2.12470 2.05880 -0.00020
H 0.00010 3.25980 0.00000
H -4.39140 -0.31530 0.00040
H 4.39150 -0.31500 -0.00100
""")
# SAPT0 interaction-energy decomposition of the O/M dimer.
# NOTE(review): the variables read below are the scaled sSAPT0 components —
# confirm that scaled (rather than plain SAPT0) values are intended.
psi4.energy('sapt0/jun-cc-pvdz', molecule=comparison1)
one_disp = psi4.variable('SSAPT0 DISP ENERGY')
one_elst = psi4.variable('SSAPT0 ELST ENERGY')
one_exch = psi4.variable('SSAPT0 EXCH ENERGY')
one_ind = psi4.variable('SSAPT0 IND ENERGY')
one_tot =psi4.variable('SSAPT0 TOTAL ENERGY')
#comparison2 O/P
comparison2 = psi4.geometry("""
0 1
O 1.72900 1.44890 1.30290
O 0.52150 -2.76860 0.50990
O 1.94280 1.60280 -0.95480
O 1.94610 -1.35340 -0.55510
C -0.07460 0.73300 -0.00740
C -0.28440 -0.64490 -0.05810
C -1.16380 1.60440 0.00750
C -1.58340 -1.15170 -0.09400
C -2.46290 1.09750 -0.02840
C -2.67270 -0.28050 -0.07920
C 1.27000 1.29310 0.03190
C 0.83250 -1.58060 -0.07500
H -1.01340 2.68020 0.04680
H -1.78090 -2.21920 -0.14740
H -3.31100 1.77580 -0.01770
H -3.68430 -0.67460 -0.11040
H 2.63430 1.82640 1.32040
H 1.27850 -3.39240 0.49470
--
0 1
O 3.39660 1.18370 -0.00030
O -3.39650 -1.18360 -0.00070
O 3.54230 -1.08460 0.00020
O -3.54230 1.08470 0.00000
C 1.39460 -0.03050 -0.00050
C -1.39450 0.03050 0.00050
C 0.72350 1.19240 -0.00030
C -0.67100 1.22300 0.00030
C 0.67090 -1.22310 -0.00020
C -0.72370 -1.19250 0.00030
C 2.84840 -0.06240 0.00030
C -2.84820 0.06250 0.00030
H 1.24760 2.14410 -0.00030
H -1.17530 2.18580 0.00040
H 1.17520 -2.18590 -0.00010
H -1.24770 -2.14420 0.00050
H 4.37700 1.15110 -0.00040
H -4.37700 -1.15080 -0.00130
""")
# SAPT0 interaction-energy decomposition of the O/P dimer
psi4.energy('sapt0/jun-cc-pvdz', molecule=comparison2)
two_disp = psi4.variable('SSAPT0 DISP ENERGY')
two_elst = psi4.variable('SSAPT0 ELST ENERGY')
two_exch = psi4.variable('SSAPT0 EXCH ENERGY')
two_ind = psi4.variable('SSAPT0 IND ENERGY')
two_tot =psi4.variable('SSAPT0 TOTAL ENERGY')
#comparison3 M/P
comparison3 = psi4.geometry("""
0 1
O -3.54720 0.18420 0.00030
O 3.54710 0.18450 -0.00070
O -2.58510 -1.87520 -0.00050
O 2.58550 -1.87510 0.00060
C -1.20810 0.08130 0.00030
C 1.20790 0.08110 -0.00010
C -0.00020 -0.61630 0.00010
C -1.20790 1.47630 0.00020
C 1.20790 1.47610 -0.00020
C 0.00000 2.17360 0.00010
C -2.46790 -0.64530 -0.00040
C 2.46800 -0.64530 0.00040
H -0.00020 -1.70390 0.00010
H -2.12470 2.05900 0.00020
H 2.12470 2.05880 -0.00020
H 0.00010 3.25980 0.00000
H -4.39140 -0.31530 0.00040
H 4.39150 -0.31500 -0.00100
--
0 1
O 3.39660 1.18370 -0.00030
O -3.39650 -1.18360 -0.00070
O 3.54230 -1.08460 0.00020
O -3.54230 1.08470 0.00000
C 1.39460 -0.03050 -0.00050
C -1.39450 0.03050 0.00050
C 0.72350 1.19240 -0.00030
C -0.67100 1.22300 0.00030
C 0.67090 -1.22310 -0.00020
C -0.72370 -1.19250 0.00030
C 2.84840 -0.06240 0.00030
C -2.84820 0.06250 0.00030
H 1.24760 2.14410 -0.00030
H -1.17530 2.18580 0.00040
H 1.17520 -2.18590 -0.00010
H -1.24770 -2.14420 0.00050
H 4.37700 1.15110 -0.00040
H -4.37700 -1.15080 -0.00130
""")
# SAPT0 interaction-energy decomposition of the M/P dimer
psi4.energy('sapt0/jun-cc-pvdz', molecule=comparison3)
three_disp = psi4.variable('SSAPT0 DISP ENERGY')
three_elst = psi4.variable('SSAPT0 ELST ENERGY')
three_exch = psi4.variable('SSAPT0 EXCH ENERGY')
three_ind = psi4.variable('SSAPT0 IND ENERGY')
three_tot =psi4.variable('SSAPT0 TOTAL ENERGY')
# comparing the interaction energies for each dimer
# x-axis positions for the three comparisons: 1 = O/M, 2 = O/P, 3 = M/P
# (bug fix: `comparison` was never defined, so every plt.plot call below
# raised a NameError)
comparison = np.array([1, 2, 3])
# component labels — presumably intended for a plot legend; kept for
# compatibility even though they are not used below
labels = np.array(['disp', 'elst', 'exch', 'ind', 'tot'])
dispersion = np.array([one_disp, two_disp, three_disp])
elst = np.array([one_elst, two_elst, three_elst])
exch = np.array([one_exch, two_exch, three_exch])
# bug fix: this assignment was accidentally duplicated in the original
ind = np.array([one_ind, two_ind, three_ind])
tot = np.array([one_tot, two_tot, three_tot])
# plotting these comparisons, one marker style per SAPT0 component
plt.plot(comparison, tot, '-ob')
plt.plot(comparison, ind, '-xr')
plt.plot(comparison, exch, '-py')
plt.plot(comparison, elst, '-dk')
plt.plot(comparison, dispersion, '-m<')
plt.show()
| 32.981818
| 54
| 0.471472
| 1,043
| 7,256
| 3.245446
| 0.186002
| 0.010635
| 0.079764
| 0.004727
| 0.806499
| 0.802954
| 0.662925
| 0.653471
| 0.653471
| 0.635746
| 0
| 0.477311
| 0.419928
| 7,256
| 219
| 55
| 33.13242
| 0.326919
| 0.031422
| 0
| 0.772152
| 0
| 0
| 0.81642
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012658
| 0
| 0.012658
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
af2be619feb76b614c7fd26758d27b29ac7d6d5d
| 64,712
|
py
|
Python
|
vim-mocker/app/pishahang-os/heat/static_response.py
|
CN-UPB/MANO-Benchmarking-Framework
|
6a1d3fd231663042853a52b2be02619e836d3280
|
[
"Apache-2.0"
] | null | null | null |
vim-mocker/app/pishahang-os/heat/static_response.py
|
CN-UPB/MANO-Benchmarking-Framework
|
6a1d3fd231663042853a52b2be02619e836d3280
|
[
"Apache-2.0"
] | 1
|
2021-03-25T23:31:21.000Z
|
2021-03-25T23:31:21.000Z
|
vim-mocker/app/pishahang-os/heat/static_response.py
|
CN-UPB/MANO-Benchmarking-Framework
|
6a1d3fd231663042853a52b2be02619e836d3280
|
[
"Apache-2.0"
] | null | null | null |
# Replace
# stack_created["id"]
# stack_created["links"]["href"]
# Heat "show stack" self link with {stack_name}/{stack_id} placeholders;
# callers substitute the real values (see the Replace note above).
_STACK_HREF = "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}"

# Template body returned after a stack is created.
stack_created = {
    "stack": {
        "id": "{stack_id}",
        "links": [
            {
                "href": _STACK_HREF,
                "rel": "self"
            }
        ]
    }
}

# Body returned when a stack lookup succeeds (302 "found"-style location).
found = _STACK_HREF
def create_started_0(stack_id, stack_name):
    """Mock Heat "show stack" response: CREATE has just started.

    :param stack_id: stack UUID substituted into the body and self link
    :param stack_name: stack name substituted into the body and self link
    :return: dict shaped like OpenStack Heat's stack-detail response
    """
    self_link = (
        "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189"
        "/stacks/{stack_name}/{stack_id}"
    ).format(stack_name=stack_name, stack_id=stack_id)
    return {
        "stack": {
            "parent": None,
            "disable_rollback": True,
            "description": "No description",
            "parameters": {
                "OS::project_id": "40d9de036960447dafd7d74d306cf189",
                "OS::stack_id": stack_id,
                "OS::stack_name": stack_name
            },
            "deletion_time": None,
            "stack_name": stack_name,
            "stack_user_project_id": "19a10c8e62c04fb7987d87aeda2474b8",
            "stack_status_reason": "Stack CREATE started",
            "creation_time": "2020-03-02T14:05:54Z",
            "links": [{"href": self_link, "rel": "self"}],
            "capabilities": [],
            "notification_topics": [],
            "tags": None,
            "timeout_mins": None,
            "stack_status": "CREATE_IN_PROGRESS",
            "stack_owner": None,
            "updated_time": None,
            "id": stack_id,
            "outputs": [],
            "template_description": "No description"
        }
    }
def create_complete_1(stack_id, stack_name):
    """Mock Heat "show stack" response: CREATE finished successfully.

    :param stack_id: stack UUID substituted into the body and self link
    :param stack_name: stack name substituted into the body and self link
    :return: dict shaped like OpenStack Heat's stack-detail response
    """
    self_link = (
        "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189"
        "/stacks/{stack_name}/{stack_id}"
    ).format(stack_name=stack_name, stack_id=stack_id)
    return {
        "stack": {
            "parent": None,
            "disable_rollback": True,
            "description": "No description",
            "parameters": {
                "OS::project_id": "40d9de036960447dafd7d74d306cf189",
                "OS::stack_id": stack_id,
                "OS::stack_name": stack_name
            },
            "deletion_time": None,
            "stack_name": stack_name,
            "stack_user_project_id": "19a10c8e62c04fb7987d87aeda2474b8",
            "stack_status_reason": "Stack CREATE completed successfully",
            "creation_time": "2020-03-02T14:05:54Z",
            "links": [{"href": self_link, "rel": "self"}],
            "capabilities": [],
            "notification_topics": [],
            "tags": None,
            "timeout_mins": None,
            "stack_status": "CREATE_COMPLETE",
            "stack_owner": None,
            "updated_time": None,
            "id": stack_id,
            "outputs": [],
            "template_description": "No description"
        }
    }
def stack_update_started_2(stack_id, stack_name):
    """Mock Heat "show stack" response: UPDATE has just started.

    :param stack_id: stack UUID substituted into the body and self link
    :param stack_name: stack name substituted into the body and self link
    :return: dict shaped like OpenStack Heat's stack-detail response
    """
    self_link = (
        "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189"
        "/stacks/{stack_name}/{stack_id}"
    ).format(stack_name=stack_name, stack_id=stack_id)
    return {
        "stack": {
            "parent": None,
            "disable_rollback": True,
            "description": "No description",
            "parameters": {
                "OS::project_id": "40d9de036960447dafd7d74d306cf189",
                "OS::stack_id": stack_id,
                "OS::stack_name": stack_name
            },
            "deletion_time": None,
            "stack_name": stack_name,
            "stack_user_project_id": "19a10c8e62c04fb7987d87aeda2474b8",
            "stack_status_reason": "Stack UPDATE started",
            "creation_time": "2020-03-02T14:05:54Z",
            "links": [{"href": self_link, "rel": "self"}],
            "capabilities": [],
            "notification_topics": [],
            "tags": None,
            "timeout_mins": None,
            "stack_status": "UPDATE_IN_PROGRESS",
            "stack_owner": None,
            # updates carry a fixed captured timestamp, unlike the create
            # responses whose updated_time is None
            "updated_time": "2020-03-02T14:06:11Z",
            "id": stack_id,
            "outputs": [],
            "template_description": "No description"
        }
    }
def stack_update_completed_3(stack_id, stack_name):
    """Mock Heat "show stack" response: UPDATE finished successfully.

    :param stack_id: stack UUID substituted into the body and self link
    :param stack_name: stack name substituted into the body and self link
    :return: dict shaped like OpenStack Heat's stack-detail response
    """
    self_link = (
        "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189"
        "/stacks/{stack_name}/{stack_id}"
    ).format(stack_name=stack_name, stack_id=stack_id)
    return {
        "stack": {
            "parent": None,
            "disable_rollback": True,
            "description": "No description",
            "parameters": {
                "OS::project_id": "40d9de036960447dafd7d74d306cf189",
                "OS::stack_id": stack_id,
                "OS::stack_name": stack_name
            },
            "deletion_time": None,
            "stack_name": stack_name,
            "stack_user_project_id": "19a10c8e62c04fb7987d87aeda2474b8",
            "stack_status_reason": "Stack UPDATE completed successfully",
            "creation_time": "2020-03-02T14:05:54Z",
            "links": [{"href": self_link, "rel": "self"}],
            "capabilities": [],
            "notification_topics": [],
            "tags": None,
            "timeout_mins": None,
            "stack_status": "UPDATE_COMPLETE",
            "stack_owner": None,
            "updated_time": "2020-03-02T14:06:11Z",
            "id": stack_id,
            "outputs": [],
            "template_description": "No description"
        }
    }
# Static HOT (Heat Orchestration Template) echoed back by the mock for
# template requests: Neutron nets, subnets and router interfaces for
# service instance 6b4c2ba0-02f2-4e77-8185-e770c4757f3b.
# NOTE(review): several "name" values read "SonatService" instead of
# "SonataService" — presumably a typo captured from the real response;
# kept verbatim since consumers may match on it.
template = {
"heat_template_version": "2015-04-30",
"resources": {
"SonataService.input.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"type": "OS::Neutron::RouterInterface",
"properties": {
"subnet": {
"get_resource": "SonataService.input.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
},
"router": "95a8c9dc-de39-4823-a19a-ff86a54f7bb1"
}
},
"SonataService.input.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"type": "OS::Neutron::Subnet",
"properties": {
"cidr": "10.0.1.160/27",
"gateway_ip": "10.0.1.161",
"name": "SonataService.input.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"network": {
"get_resource": "SonataService.input.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
}
}
},
"SonataService.mgmt.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"type": "OS::Neutron::Subnet",
"properties": {
"gateway_ip": "10.0.1.65",
"cidr": "10.0.1.64/27",
"dns_nameservers": [
"8.8.8.8"
],
"name": "SonataService.mgmt.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"network": {
"get_resource": "SonataService.mgmt.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
}
}
},
"SonataService.output.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"type": "OS::Neutron::Subnet",
"properties": {
"cidr": "10.0.1.192/27",
"gateway_ip": "10.0.1.193",
"name": "SonataService.output.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"network": {
"get_resource": "SonataService.output.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
}
}
},
"SonataService.input.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"type": "OS::Neutron::Net",
"properties": {
"name": "SonatService.input.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
}
},
"SonataService.mgmt.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"type": "OS::Neutron::Net",
"properties": {
"name": "SonatService.mgmt.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
}
},
"SonataService.output.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"type": "OS::Neutron::Net",
"properties": {
"name": "SonatService.output.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
}
},
"SonataService.output.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"type": "OS::Neutron::RouterInterface",
"properties": {
"subnet": {
"get_resource": "SonataService.output.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
},
"router": "95a8c9dc-de39-4823-a19a-ff86a54f7bb1"
}
},
"SonataService.external.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"type": "OS::Neutron::Subnet",
"properties": {
"gateway_ip": "10.0.1.97",
"cidr": "10.0.1.96/27",
"dns_nameservers": [
"8.8.8.8"
],
"name": "SonataService.external.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"network": {
"get_resource": "SonataService.external.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
}
}
},
"SonataService.internal.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"type": "OS::Neutron::Net",
"properties": {
"name": "SonatService.internal.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
}
},
"SonataService.internal.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"type": "OS::Neutron::Subnet",
"properties": {
"cidr": "10.0.1.128/27",
"gateway_ip": "10.0.1.129",
"name": "SonataService.internal.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"network": {
"get_resource": "SonataService.internal.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
}
}
},
"SonataService.ext.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"type": "OS::Neutron::RouterInterface",
"properties": {
"subnet": {
"get_resource": "SonataService.external.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
},
"router": "95a8c9dc-de39-4823-a19a-ff86a54f7bb1"
}
},
"SonataService.external.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"type": "OS::Neutron::Net",
"properties": {
"name": "SonatService.external.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
}
},
"SonataService.mgmt.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"type": "OS::Neutron::RouterInterface",
"properties": {
"subnet": {
"get_resource": "SonataService.mgmt.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
},
"router": "95a8c9dc-de39-4823-a19a-ff86a54f7bb1"
}
}
}
}
# Static acknowledgement body returned for stack PATCH (update) requests.
patch = "The request is accepted for processing."
def resources(stack_id, stack_name):
resources = {
"resources": [
{
"resource_name": "cirros-image-1_6b4c2ba0-02f2-4e77-8185-e770c4757f3b_spAddressCloudConfig",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/cirros-image-1_6b4c2ba0-02f2-4e77-8185-e770c4757f3b_spAddressCloudConfig".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "cirros-image-1_6b4c2ba0-02f2-4e77-8185-e770c4757f3b_spAddressCloudConfig",
"creation_time": "2020-03-02T14:06:13Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:06:11Z",
"required_by": [
"cirros-image-1.cirros-image-1.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "45f24dea-7a6f-4aa4-9395-3025a59b2797",
"resource_type": "OS::Heat::CloudConfig"
},
{
"resource_name": "SonataService.output.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/SonataService.output.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "SonataService.output.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:54Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:54Z",
"required_by": [
],
"resource_status_reason": "state changed",
"physical_resource_id": "95a8c9dc-de39-4823-a19a-ff86a54f7bb1:subnet_id=bb7e71a2-932e-4fe1-9dfa-8e5ade668da4",
"resource_type": "OS::Neutron::RouterInterface"
},
{
"resource_name": "SonataService.input.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/SonataService.input.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "SonataService.input.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
],
"resource_status_reason": "state changed",
"physical_resource_id": "95a8c9dc-de39-4823-a19a-ff86a54f7bb1:subnet_id=913dce42-6790-4732-b944-7b9c4887ac10",
"resource_type": "OS::Neutron::RouterInterface"
},
{
"resource_name": "SonataService.input.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/SonataService.input.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "SonataService.input.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
"SonataService.input.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "913dce42-6790-4732-b944-7b9c4887ac10",
"resource_type": "OS::Neutron::Subnet"
},
{
"resource_name": "SonataService.mgmt.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/SonataService.mgmt.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "SonataService.mgmt.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:54Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:54Z",
"required_by": [
"SonataService.mgmt.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "68fe0d27-3e79-432a-994d-77fedf6022ba",
"resource_type": "OS::Neutron::Subnet"
},
{
"resource_name": "SonataService.output.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/SonataService.output.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "SonataService.output.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:54Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:54Z",
"required_by": [
"SonataService.output.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "bb7e71a2-932e-4fe1-9dfa-8e5ade668da4",
"resource_type": "OS::Neutron::Subnet"
},
{
"resource_name": "SonataService.input.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/SonataService.input.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "SonataService.input.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
"SonataService.input.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "331d5c5d-bba9-4b0b-adf5-da83f4861fc0",
"resource_type": "OS::Neutron::Net"
},
{
"resource_name": "SonataService.mgmt.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/SonataService.mgmt.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "SonataService.mgmt.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
"SonataService.mgmt.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "0661b73a-5008-4cb6-a01c-0ffa58d8a0b9",
"resource_type": "OS::Neutron::Net"
},
{
"resource_name": "SonataService.output.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/SonataService.output.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "SonataService.output.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
"SonataService.output.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "c61f2f91-94b6-49b4-b920-d0c3689e8342",
"resource_type": "OS::Neutron::Net"
},
{
"resource_name": "cirros-image-1.cirros-image-1.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/cirros-image-1.cirros-image-1.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}-cirros-image-1.cirros-image-1.6b4c2ba0-02f2-4e77-8185-e770c4757f3b-tzgmdqfr6ur2/a45679f4-643a-4ba4-845a-c3c9ade12b6b".format(stack_name=stack_name),
"rel": "nested"
}
],
"logical_resource_id": "cirros-image-1.cirros-image-1.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:06:12Z",
"resource_status_reason": "state changed",
"updated_time": "2020-03-02T14:06:11Z",
"required_by": [
],
"resource_status": "CREATE_COMPLETE",
"physical_resource_id": "a45679f4-643a-4ba4-845a-c3c9ade12b6b",
"resource_type": "OS::Heat::ResourceGroup"
},
{
"resource_name": "SonataService.internal.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/SonataService.internal.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "SonataService.internal.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:54Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:54Z",
"required_by": [
"cirros-image-1.cirros-image-1.eth0.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "8ebb0e88-c5b3-42bf-8a8f-f6105ba222a7",
"resource_type": "OS::Neutron::Subnet"
},
{
"resource_name": "SonataService.external.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/SonataService.external.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "SonataService.external.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
"SonataService.ext.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "67485f03-bdd5-480b-aabe-61c08feebf29",
"resource_type": "OS::Neutron::Subnet"
},
{
"resource_name": "SonataService.internal.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/SonataService.internal.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "SonataService.internal.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
"SonataService.internal.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"cirros-image-1.cirros-image-1.eth0.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "457e1c8c-fe4e-4ca0-a9cb-6419ae621584",
"resource_type": "OS::Neutron::Net"
},
{
"resource_name": "cirros-image-1.cirros-image-1.eth0.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/cirros-image-1.cirros-image-1.eth0.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "cirros-image-1.cirros-image-1.eth0.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:06:12Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:06:11Z",
"required_by": [
"cirros-image-1.cirros-image-1.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "b3809c9e-15ef-4201-8974-35bf41a960d1",
"resource_type": "OS::Neutron::Port"
},
{
"resource_name": "SonataService.ext.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/SonataService.ext.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "SonataService.ext.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:54Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:54Z",
"required_by": [
],
"resource_status_reason": "state changed",
"physical_resource_id": "95a8c9dc-de39-4823-a19a-ff86a54f7bb1:subnet_id=67485f03-bdd5-480b-aabe-61c08feebf29",
"resource_type": "OS::Neutron::RouterInterface"
},
{
"resource_name": "SonataService.external.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/SonataService.external.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "SonataService.external.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
"SonataService.external.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "41b267eb-30d7-4704-8bae-0ff94320cea4",
"resource_type": "OS::Neutron::Net"
},
{
"resource_name": "SonataService.mgmt.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}/resources/SonataService.mgmt.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b".format(stack_name=stack_name, stack_id=stack_id),
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/{stack_name}/{stack_id}".format(stack_name=stack_name, stack_id=stack_id),
"rel": "stack"
}
],
"logical_resource_id": "SonataService.mgmt.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:54Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:54Z",
"required_by": [
],
"resource_status_reason": "state changed",
"physical_resource_id": "95a8c9dc-de39-4823-a19a-ff86a54f7bb1:subnet_id=68fe0d27-3e79-432a-994d-77fedf6022ba",
"resource_type": "OS::Neutron::RouterInterface"
}
]
}
return resources
def resources_status(stack_id, stack_name, resource_name):
    """Build a mocked Heat ``resource show`` response body.

    Mimics the JSON payload the OpenStack Heat v1 API returns for a single
    stack resource (here an ``OS::Neutron::RouterInterface``): fixed
    timestamps, status and physical-resource id, with the ``self``/``stack``
    links rendered from the supplied identifiers.

    Args:
        stack_id: UUID of the Heat stack the resource belongs to.
        stack_name: Name of the Heat stack.
        resource_name: Logical name of the queried resource.

    Returns:
        dict: A ``{"resource": {...}}`` mapping parameterised by the
        arguments, everything else hard-coded mock data.
    """
    # Both links share the same stack URL; render it once and derive the
    # resource-specific "self" link from it.
    stack_href = (
        "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189"
        "/stacks/{stack_name}/{stack_id}"
    ).format(stack_name=stack_name, stack_id=stack_id)
    self_href = stack_href + "/resources/{resource_name}".format(
        resource_name=resource_name
    )
    return {
        "resource": {
            "resource_name": resource_name,
            "description": "",
            "links": [
                {"href": self_href, "rel": "self"},
                {"href": stack_href, "rel": "stack"},
            ],
            "logical_resource_id": resource_name,
            "creation_time": "2020-03-02T14:05:54Z",
            "resource_status": "CREATE_COMPLETE",
            "updated_time": "2020-03-02T14:05:54Z",
            "required_by": [],
            "resource_status_reason": "state changed",
            "physical_resource_id": "95a8c9dc-de39-4823-a19a-ff86a54f7bb1:subnet_id=bb7e71a2-932e-4fe1-9dfa-8e5ade668da4",
            "attributes": {},
            "resource_type": "OS::Neutron::RouterInterface",
        }
    }
resources_status_individual = {
"cirros-image-1_6b4c2ba0-02f2-4e77-8185-e770c4757f3b_spAddressCloudConfig": {
"resource": {
"resource_name": "cirros-image-1_6b4c2ba0-02f2-4e77-8185-e770c4757f3b_spAddressCloudConfig",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/cirros-image-1_6b4c2ba0-02f2-4e77-8185-e770c4757f3b_spAddressCloudConfig",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "cirros-image-1_6b4c2ba0-02f2-4e77-8185-e770c4757f3b_spAddressCloudConfig",
"creation_time": "2020-03-02T14:06:13Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:06:11Z",
"required_by": [
"cirros-image-1.cirros-image-1.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "45f24dea-7a6f-4aa4-9395-3025a59b2797",
"attributes": {
"config": "#cloud-config\nwrite_files:\n- {content: 'SP_ADDRESS=131.234.28.240\n\n', path: /etc/sonata_sp_address.conf}\n"
},
"resource_type": "OS::Heat::CloudConfig"
}
},
"SonataService.output.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "SonataService.output.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/SonataService.output.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "SonataService.output.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:54Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:54Z",
"required_by": [
],
"resource_status_reason": "state changed",
"physical_resource_id": "95a8c9dc-de39-4823-a19a-ff86a54f7bb1:subnet_id=bb7e71a2-932e-4fe1-9dfa-8e5ade668da4",
"attributes": {
},
"resource_type": "OS::Neutron::RouterInterface"
}
},
"SonataService.input.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "SonataService.input.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/SonataService.input.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "SonataService.input.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
],
"resource_status_reason": "state changed",
"physical_resource_id": "95a8c9dc-de39-4823-a19a-ff86a54f7bb1:subnet_id=913dce42-6790-4732-b944-7b9c4887ac10",
"attributes": {
},
"resource_type": "OS::Neutron::RouterInterface"
}
},
"SonataService.input.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "SonataService.input.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/SonataService.input.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "SonataService.input.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
"SonataService.input.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "913dce42-6790-4732-b944-7b9c4887ac10",
"attributes": {
"service_types": [
],
"description": "",
"enable_dhcp": True,
"tags": [
],
"network_id": "331d5c5d-bba9-4b0b-adf5-da83f4861fc0",
"tenant_id": "40d9de036960447dafd7d74d306cf189",
"created_at": "2020-03-02T14:05:58Z",
"dns_nameservers": [
],
"updated_at": "2020-03-02T14:05:58Z",
"ipv6_ra_mode": None,
"allocation_pools": [
{
"start": "10.0.1.162",
"end": "10.0.1.190"
}
],
"host_routes": [
],
"revision_number": 2,
"ipv6_address_mode": None,
"ip_version": 4,
"gateway_ip": "10.0.1.161",
"cidr": "10.0.1.160/27",
"project_id": "40d9de036960447dafd7d74d306cf189",
"id": "913dce42-6790-4732-b944-7b9c4887ac10",
"subnetpool_id": None,
"name": "SonataService.input.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
},
"resource_type": "OS::Neutron::Subnet"
}
},
"SonataService.mgmt.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "SonataService.mgmt.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/SonataService.mgmt.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "SonataService.mgmt.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:54Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:54Z",
"required_by": [
"SonataService.mgmt.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "68fe0d27-3e79-432a-994d-77fedf6022ba",
"attributes": {
"service_types": [
],
"description": "",
"enable_dhcp": True,
"tags": [
],
"network_id": "0661b73a-5008-4cb6-a01c-0ffa58d8a0b9",
"tenant_id": "40d9de036960447dafd7d74d306cf189",
"created_at": "2020-03-02T14:06:01Z",
"dns_nameservers": [
"8.8.8.8"
],
"updated_at": "2020-03-02T14:06:01Z",
"ipv6_ra_mode": None,
"allocation_pools": [
{
"start": "10.0.1.66",
"end": "10.0.1.94"
}
],
"host_routes": [
],
"revision_number": 2,
"ipv6_address_mode": None,
"ip_version": 4,
"gateway_ip": "10.0.1.65",
"cidr": "10.0.1.64/27",
"project_id": "40d9de036960447dafd7d74d306cf189",
"id": "68fe0d27-3e79-432a-994d-77fedf6022ba",
"subnetpool_id": None,
"name": "SonataService.mgmt.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
},
"resource_type": "OS::Neutron::Subnet"
}
},
"SonataService.output.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "SonataService.output.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/SonataService.output.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "SonataService.output.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:54Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:54Z",
"required_by": [
"SonataService.output.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "bb7e71a2-932e-4fe1-9dfa-8e5ade668da4",
"attributes": {
"service_types": [
],
"description": "",
"enable_dhcp": True,
"tags": [
],
"network_id": "c61f2f91-94b6-49b4-b920-d0c3689e8342",
"tenant_id": "40d9de036960447dafd7d74d306cf189",
"created_at": "2020-03-02T14:06:02Z",
"dns_nameservers": [
],
"updated_at": "2020-03-02T14:06:02Z",
"ipv6_ra_mode": None,
"allocation_pools": [
{
"start": "10.0.1.194",
"end": "10.0.1.222"
}
],
"host_routes": [
],
"revision_number": 2,
"ipv6_address_mode": None,
"ip_version": 4,
"gateway_ip": "10.0.1.193",
"cidr": "10.0.1.192/27",
"project_id": "40d9de036960447dafd7d74d306cf189",
"id": "bb7e71a2-932e-4fe1-9dfa-8e5ade668da4",
"subnetpool_id": None,
"name": "SonataService.output.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
},
"resource_type": "OS::Neutron::Subnet"
}
},
"SonataService.input.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "SonataService.input.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/SonataService.input.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "SonataService.input.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
"SonataService.input.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "331d5c5d-bba9-4b0b-adf5-da83f4861fc0",
"attributes": {
"provider:physical_network": None,
"ipv6_address_scope": None,
"revision_number": 5,
"port_security_enabled": True,
"provider:network_type": "vxlan",
"id": "331d5c5d-bba9-4b0b-adf5-da83f4861fc0",
"router:external": False,
"availability_zone_hints": [
],
"availability_zones": [
"nova"
],
"ipv4_address_scope": None,
"shared": False,
"project_id": "40d9de036960447dafd7d74d306cf189",
"status": "ACTIVE",
"subnets": [
"913dce42-6790-4732-b944-7b9c4887ac10"
],
"description": "",
"tags": [
],
"updated_at": "2020-03-02T14:05:58Z",
"provider:segmentation_id": 84,
"name": "SonatService.input.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"admin_state_up": True,
"tenant_id": "40d9de036960447dafd7d74d306cf189",
"created_at": "2020-03-02T14:05:56Z",
"mtu": 1450
},
"resource_type": "OS::Neutron::Net"
}
},
"SonataService.mgmt.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "SonataService.mgmt.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/SonataService.mgmt.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "SonataService.mgmt.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
"SonataService.mgmt.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "0661b73a-5008-4cb6-a01c-0ffa58d8a0b9",
"attributes": {
"provider:physical_network": None,
"ipv6_address_scope": None,
"revision_number": 5,
"port_security_enabled": True,
"provider:network_type": "vxlan",
"id": "0661b73a-5008-4cb6-a01c-0ffa58d8a0b9",
"router:external": False,
"availability_zone_hints": [
],
"availability_zones": [
"nova"
],
"ipv4_address_scope": None,
"shared": False,
"project_id": "40d9de036960447dafd7d74d306cf189",
"status": "ACTIVE",
"subnets": [
"68fe0d27-3e79-432a-994d-77fedf6022ba"
],
"description": "",
"tags": [
],
"updated_at": "2020-03-02T14:06:01Z",
"provider:segmentation_id": 101,
"name": "SonatService.mgmt.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"admin_state_up": True,
"tenant_id": "40d9de036960447dafd7d74d306cf189",
"created_at": "2020-03-02T14:05:58Z",
"mtu": 1450
},
"resource_type": "OS::Neutron::Net"
}
},
"SonataService.output.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "SonataService.output.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/SonataService.output.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "SonataService.output.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
"SonataService.output.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "c61f2f91-94b6-49b4-b920-d0c3689e8342",
"attributes": {
"provider:physical_network": None,
"ipv6_address_scope": None,
"revision_number": 5,
"port_security_enabled": True,
"provider:network_type": "vxlan",
"id": "c61f2f91-94b6-49b4-b920-d0c3689e8342",
"router:external": False,
"availability_zone_hints": [
],
"availability_zones": [
"nova"
],
"ipv4_address_scope": None,
"shared": False,
"project_id": "40d9de036960447dafd7d74d306cf189",
"status": "ACTIVE",
"subnets": [
"bb7e71a2-932e-4fe1-9dfa-8e5ade668da4"
],
"description": "",
"tags": [
],
"updated_at": "2020-03-02T14:06:02Z",
"provider:segmentation_id": 31,
"name": "SonatService.output.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"admin_state_up": True,
"tenant_id": "40d9de036960447dafd7d74d306cf189",
"created_at": "2020-03-02T14:05:59Z",
"mtu": 1450
},
"resource_type": "OS::Neutron::Net"
}
},
"cirros-image-1.cirros-image-1.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "cirros-image-1.cirros-image-1.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/cirros-image-1.cirros-image-1.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b-cirros-image-1.cirros-image-1.6b4c2ba0-02f2-4e77-8185-e770c4757f3b-tzgmdqfr6ur2/a45679f4-643a-4ba4-845a-c3c9ade12b6b",
"rel": "nested"
}
],
"logical_resource_id": "cirros-image-1.cirros-image-1.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:06:12Z",
"resource_status_reason": "state changed",
"updated_time": "2020-03-02T14:06:11Z",
"required_by": [
],
"resource_status": "CREATE_COMPLETE",
"physical_resource_id": "a45679f4-643a-4ba4-845a-c3c9ade12b6b",
"attributes": {
"attributes": None,
"refs": None,
"refs_map": None,
"removed_rsrc_list": [
]
},
"resource_type": "OS::Heat::ResourceGroup"
}
},
"SonataService.internal.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "SonataService.internal.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/SonataService.internal.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "SonataService.internal.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:54Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:54Z",
"required_by": [
"cirros-image-1.cirros-image-1.eth0.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "8ebb0e88-c5b3-42bf-8a8f-f6105ba222a7",
"attributes": {
"service_types": [
],
"description": "",
"enable_dhcp": True,
"tags": [
],
"network_id": "457e1c8c-fe4e-4ca0-a9cb-6419ae621584",
"tenant_id": "40d9de036960447dafd7d74d306cf189",
"created_at": "2020-03-02T14:06:04Z",
"dns_nameservers": [
],
"updated_at": "2020-03-02T14:06:04Z",
"ipv6_ra_mode": None,
"allocation_pools": [
{
"start": "10.0.1.130",
"end": "10.0.1.158"
}
],
"host_routes": [
],
"revision_number": 2,
"ipv6_address_mode": None,
"ip_version": 4,
"gateway_ip": "10.0.1.129",
"cidr": "10.0.1.128/27",
"project_id": "40d9de036960447dafd7d74d306cf189",
"id": "8ebb0e88-c5b3-42bf-8a8f-f6105ba222a7",
"subnetpool_id": None,
"name": "SonataService.internal.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
},
"resource_type": "OS::Neutron::Subnet"
}
},
"SonataService.external.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "SonataService.external.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/SonataService.external.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "SonataService.external.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
"SonataService.ext.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "67485f03-bdd5-480b-aabe-61c08feebf29",
"attributes": {
"service_types": [
],
"description": "",
"enable_dhcp": True,
"tags": [
],
"network_id": "41b267eb-30d7-4704-8bae-0ff94320cea4",
"tenant_id": "40d9de036960447dafd7d74d306cf189",
"created_at": "2020-03-02T14:05:59Z",
"dns_nameservers": [
"8.8.8.8"
],
"updated_at": "2020-03-02T14:05:59Z",
"ipv6_ra_mode": None,
"allocation_pools": [
{
"start": "10.0.1.98",
"end": "10.0.1.126"
}
],
"host_routes": [
],
"revision_number": 2,
"ipv6_address_mode": None,
"ip_version": 4,
"gateway_ip": "10.0.1.97",
"cidr": "10.0.1.96/27",
"project_id": "40d9de036960447dafd7d74d306cf189",
"id": "67485f03-bdd5-480b-aabe-61c08feebf29",
"subnetpool_id": None,
"name": "SonataService.external.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
},
"resource_type": "OS::Neutron::Subnet"
}
},
"SonataService.internal.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "SonataService.internal.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/SonataService.internal.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "SonataService.internal.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
"SonataService.internal.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"cirros-image-1.cirros-image-1.eth0.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "457e1c8c-fe4e-4ca0-a9cb-6419ae621584",
"attributes": {
"provider:physical_network": None,
"ipv6_address_scope": None,
"revision_number": 5,
"port_security_enabled": True,
"provider:network_type": "vxlan",
"id": "457e1c8c-fe4e-4ca0-a9cb-6419ae621584",
"router:external": False,
"availability_zone_hints": [
],
"availability_zones": [
"nova"
],
"ipv4_address_scope": None,
"shared": False,
"project_id": "40d9de036960447dafd7d74d306cf189",
"status": "ACTIVE",
"subnets": [
"8ebb0e88-c5b3-42bf-8a8f-f6105ba222a7"
],
"description": "",
"tags": [
],
"updated_at": "2020-03-02T14:06:04Z",
"provider:segmentation_id": 107,
"name": "SonatService.internal.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"admin_state_up": True,
"tenant_id": "40d9de036960447dafd7d74d306cf189",
"created_at": "2020-03-02T14:06:00Z",
"mtu": 1450
},
"resource_type": "OS::Neutron::Net"
}
},
"cirros-image-1.cirros-image-1.eth0.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "cirros-image-1.cirros-image-1.eth0.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/cirros-image-1.cirros-image-1.eth0.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "cirros-image-1.cirros-image-1.eth0.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:06:12Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:06:11Z",
"required_by": [
"cirros-image-1.cirros-image-1.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "b3809c9e-15ef-4201-8974-35bf41a960d1",
"attributes": {
"allowed_address_pairs": [
],
"extra_dhcp_opts": [
],
"updated_at": "2020-03-02T14:06:30Z",
"device_owner": "compute:None",
"revision_number": 9,
"port_security_enabled": True,
"binding:profile": {
},
"fixed_ips": [
{
"subnet_id": "8ebb0e88-c5b3-42bf-8a8f-f6105ba222a7",
"ip_address": "10.0.1.133"
}
],
"id": "b3809c9e-15ef-4201-8974-35bf41a960d1",
"security_groups": [
"a2167217-dae9-4469-afe0-29d2c87a1110"
],
"binding:vif_details": {
"port_filter": True,
"ovs_hybrid_plug": False
},
"binding:vif_type": "ovs",
"mac_address": "fa:16:3e:17:33:b0",
"project_id": "40d9de036960447dafd7d74d306cf189",
"status": "ACTIVE",
"binding:host_id": "pishahang-os",
"description": "",
"tags": [
],
"device_id": "0052738d-f6b2-4ce0-8e30-5162eaaa0791",
"name": "cirros-image-1.cirros-image-1.eth0.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"admin_state_up": True,
"network_id": "457e1c8c-fe4e-4ca0-a9cb-6419ae621584",
"tenant_id": "40d9de036960447dafd7d74d306cf189",
"created_at": "2020-03-02T14:06:17Z",
"binding:vnic_type": "normal"
},
"resource_type": "OS::Neutron::Port"
}
},
"SonataService.ext.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "SonataService.ext.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/SonataService.ext.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "SonataService.ext.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:54Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:54Z",
"required_by": [
],
"resource_status_reason": "state changed",
"physical_resource_id": "95a8c9dc-de39-4823-a19a-ff86a54f7bb1:subnet_id=67485f03-bdd5-480b-aabe-61c08feebf29",
"attributes": {
},
"resource_type": "OS::Neutron::RouterInterface"
}
},
"SonataService.external.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "SonataService.external.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/SonataService.external.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "SonataService.external.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:55Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:55Z",
"required_by": [
"SonataService.external.subnet.6b4c2ba0-02f2-4e77-8185-e770c4757f3b"
],
"resource_status_reason": "state changed",
"physical_resource_id": "41b267eb-30d7-4704-8bae-0ff94320cea4",
"attributes": {
"provider:physical_network": None,
"ipv6_address_scope": None,
"revision_number": 5,
"port_security_enabled": True,
"provider:network_type": "vxlan",
"id": "41b267eb-30d7-4704-8bae-0ff94320cea4",
"router:external": False,
"availability_zone_hints": [
],
"availability_zones": [
"nova"
],
"ipv4_address_scope": None,
"shared": False,
"project_id": "40d9de036960447dafd7d74d306cf189",
"status": "ACTIVE",
"subnets": [
"67485f03-bdd5-480b-aabe-61c08feebf29"
],
"description": "",
"tags": [
],
"updated_at": "2020-03-02T14:05:59Z",
"provider:segmentation_id": 93,
"name": "SonatService.external.net.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"admin_state_up": True,
"tenant_id": "40d9de036960447dafd7d74d306cf189",
"created_at": "2020-03-02T14:05:57Z",
"mtu": 1450
},
"resource_type": "OS::Neutron::Net"
}
},
"SonataService.mgmt.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b": {
"resource": {
"resource_name": "SonataService.mgmt.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"description": "",
"links": [
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84/resources/SonataService.mgmt.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"rel": "self"
},
{
"href": "http://thesismano2.cs.upb.de:8004/v1/40d9de036960447dafd7d74d306cf189/stacks/SonataService-6b4c2ba0-02f2-4e77-8185-e770c4757f3b/1a2d5e94-c803-486d-b49d-67f712a37b84",
"rel": "stack"
}
],
"logical_resource_id": "SonataService.mgmt.internal.6b4c2ba0-02f2-4e77-8185-e770c4757f3b",
"creation_time": "2020-03-02T14:05:54Z",
"resource_status": "CREATE_COMPLETE",
"updated_time": "2020-03-02T14:05:54Z",
"required_by": [
],
"resource_status_reason": "state changed",
"physical_resource_id": "95a8c9dc-de39-4823-a19a-ff86a54f7bb1:subnet_id=68fe0d27-3e79-432a-994d-77fedf6022ba",
"attributes": {
},
"resource_type": "OS::Neutron::RouterInterface"
}
}
}
| 39.896424
| 266
| 0.637517
| 6,823
| 64,712
| 5.883189
| 0.050125
| 0.067562
| 0.090082
| 0.112603
| 0.970205
| 0.950425
| 0.930943
| 0.926185
| 0.907651
| 0.875339
| 0
| 0.226506
| 0.202327
| 64,712
| 1,622
| 267
| 39.896424
| 0.551134
| 0.000896
| 0
| 0.683733
| 0
| 0.069345
| 0.656293
| 0.242146
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003889
| false
| 0
| 0
| 0
| 0.007777
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
af56ccce5a7ef323f67d85aaae8849e66a22a7f8
| 27,895
|
py
|
Python
|
napalm_yang/models/openconfig/network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path_/candidate_secondary_paths/candidate_secondary_path/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 64
|
2016-10-20T15:47:18.000Z
|
2021-11-11T11:57:32.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path_/candidate_secondary_paths/candidate_secondary_path/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 126
|
2016-10-05T10:36:14.000Z
|
2019-05-15T08:43:23.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path_/candidate_secondary_paths/candidate_secondary_path/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 63
|
2016-11-07T15:23:08.000Z
|
2021-09-22T14:41:16.000Z
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
# Auto-generated by the pyangbind PythonClass plugin for PYANG — regenerate
# from the YANG model rather than editing by hand.
class state(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/mpls/lsps/constrained-path/tunnels/tunnel/p2p-tunnel-attributes/p2p-primary-path/p2p-primary-path/candidate-secondary-paths/candidate-secondary-path/state. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: Operational state parameters relating to the candidate
    secondary path
    """

    # Restrict instances to the declared leaves plus pybind bookkeeping slots.
    __slots__ = (
        "_path_helper", "_extmethods", "__secondary_path", "__priority", "__active"
    )

    _yang_name = "state"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        self._path_helper = False
        self._extmethods = False
        # Each leaf is wrapped in YANGDynClass so that assignments are checked
        # against the YANG-derived type restrictions; all leaves in this
        # container are operational state (is_config=False).
        self.__secondary_path = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="secondary-path",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=False,
        )
        self.__priority = YANGDynClass(
            base=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..65535"]}, int_size=16
            ),
            is_leaf=True,
            yang_name="priority",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint16",
            is_config=False,
        )
        self.__active = YANGDynClass(
            base=YANGBool,
            is_leaf=True,
            yang_name="active",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )

        # Optional copy-constructor behaviour: a single positional argument is
        # treated as a source object whose changed leaves are copied in.
        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    # Only leaves that differ from their defaults are copied.
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Delegate to the parent container when attached; otherwise fall back
        # to the absolute schema path of this container.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "mpls",
                "lsps",
                "constrained-path",
                "tunnels",
                "tunnel",
                "p2p-tunnel-attributes",
                "p2p-primary-path",
                "p2p-primary-path",
                "candidate-secondary-paths",
                "candidate-secondary-path",
                "state",
            ]

    def _get_secondary_path(self):
        """
        Getter method for secondary_path, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path/candidate_secondary_paths/candidate_secondary_path/state/secondary_path (leafref)

        YANG Description: A reference to the secondary path that should be utilised
        when the containing primary path option is in use
        """
        return self.__secondary_path

    def _set_secondary_path(self, v, load=False):
        """
        Setter method for secondary_path, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path/candidate_secondary_paths/candidate_secondary_path/state/secondary_path (leafref)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_secondary_path is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_secondary_path() directly.

        YANG Description: A reference to the secondary path that should be utilised
        when the containing primary path option is in use
        """
        if hasattr(v, "_utype"):
            # Unwrap a previously-wrapped YANGDynClass value before re-checking.
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=six.text_type,
                is_leaf=True,
                yang_name="secondary-path",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="leafref",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """secondary_path must be of a type compatible with leafref""",
                    "defined-type": "leafref",
                    "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="secondary-path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)""",
                }
            )

        self.__secondary_path = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_secondary_path(self):
        # Restore the leaf to a fresh, unset wrapper.
        self.__secondary_path = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="secondary-path",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=False,
        )

    def _get_priority(self):
        """
        Getter method for priority, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path/candidate_secondary_paths/candidate_secondary_path/state/priority (uint16)

        YANG Description: The priority of the specified secondary path option. Higher
        priority options are less preferable - such that a secondary
        path reference with a priority of 0 is the most preferred
        """
        return self.__priority

    def _set_priority(self, v, load=False):
        """
        Setter method for priority, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path/candidate_secondary_paths/candidate_secondary_path/state/priority (uint16)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_priority is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_priority() directly.

        YANG Description: The priority of the specified secondary path option. Higher
        priority options are less preferable - such that a secondary
        path reference with a priority of 0 is the most preferred
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=int, restriction_dict={"range": ["0..65535"]}, int_size=16
                ),
                is_leaf=True,
                yang_name="priority",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="uint16",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """priority must be of a type compatible with uint16""",
                    "defined-type": "uint16",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint16', is_config=False)""",
                }
            )

        self.__priority = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_priority(self):
        self.__priority = YANGDynClass(
            base=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..65535"]}, int_size=16
            ),
            is_leaf=True,
            yang_name="priority",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint16",
            is_config=False,
        )

    def _get_active(self):
        """
        Getter method for active, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path/candidate_secondary_paths/candidate_secondary_path/state/active (boolean)

        YANG Description: Indicates the current active path option that has
        been selected of the candidate secondary paths
        """
        return self.__active

    def _set_active(self, v, load=False):
        """
        Setter method for active, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path/candidate_secondary_paths/candidate_secondary_path/state/active (boolean)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_active is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_active() directly.

        YANG Description: Indicates the current active path option that has
        been selected of the candidate secondary paths
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=YANGBool,
                is_leaf=True,
                yang_name="active",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="boolean",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """active must be of a type compatible with boolean""",
                    "defined-type": "boolean",
                    "generated-type": """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
                }
            )

        self.__active = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_active(self):
        self.__active = YANGDynClass(
            base=YANGBool,
            is_leaf=True,
            yang_name="active",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )

    # Public read-only properties; writes go through the private _set_* methods.
    secondary_path = __builtin__.property(_get_secondary_path)
    priority = __builtin__.property(_get_priority)
    active = __builtin__.property(_get_active)

    _pyangbind_elements = OrderedDict(
        [("secondary_path", secondary_path), ("priority", priority), ("active", active)]
    )
# Auto-generated by the pyangbind PythonClass plugin for PYANG — regenerate
# from the YANG model rather than editing by hand.
# NOTE(review): this is a second `state` class generated for the same schema
# path from openconfig-network-instance-l2; at module scope it shadows the
# earlier definition with the same name — confirm this is intended pyangbind
# output before relying on `state` by name.
class state(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/mpls/lsps/constrained-path/tunnels/tunnel/p2p-tunnel-attributes/p2p-primary-path/p2p-primary-path/candidate-secondary-paths/candidate-secondary-path/state. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: Operational state parameters relating to the candidate
    secondary path
    """

    # Restrict instances to the declared leaves plus pybind bookkeeping slots.
    __slots__ = (
        "_path_helper", "_extmethods", "__secondary_path", "__priority", "__active"
    )

    _yang_name = "state"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        self._path_helper = False
        self._extmethods = False
        # Each leaf is wrapped in YANGDynClass so that assignments are checked
        # against the YANG-derived type restrictions; all leaves in this
        # container are operational state (is_config=False).
        self.__secondary_path = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="secondary-path",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=False,
        )
        self.__priority = YANGDynClass(
            base=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..65535"]}, int_size=16
            ),
            is_leaf=True,
            yang_name="priority",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint16",
            is_config=False,
        )
        self.__active = YANGDynClass(
            base=YANGBool,
            is_leaf=True,
            yang_name="active",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )

        # Optional copy-constructor behaviour: a single positional argument is
        # treated as a source object whose changed leaves are copied in.
        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    # Only leaves that differ from their defaults are copied.
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Delegate to the parent container when attached; otherwise fall back
        # to the absolute schema path of this container.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "mpls",
                "lsps",
                "constrained-path",
                "tunnels",
                "tunnel",
                "p2p-tunnel-attributes",
                "p2p-primary-path",
                "p2p-primary-path",
                "candidate-secondary-paths",
                "candidate-secondary-path",
                "state",
            ]

    def _get_secondary_path(self):
        """
        Getter method for secondary_path, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path/candidate_secondary_paths/candidate_secondary_path/state/secondary_path (leafref)

        YANG Description: A reference to the secondary path that should be utilised
        when the containing primary path option is in use
        """
        return self.__secondary_path

    def _set_secondary_path(self, v, load=False):
        """
        Setter method for secondary_path, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path/candidate_secondary_paths/candidate_secondary_path/state/secondary_path (leafref)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_secondary_path is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_secondary_path() directly.

        YANG Description: A reference to the secondary path that should be utilised
        when the containing primary path option is in use
        """
        if hasattr(v, "_utype"):
            # Unwrap a previously-wrapped YANGDynClass value before re-checking.
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=six.text_type,
                is_leaf=True,
                yang_name="secondary-path",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="leafref",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """secondary_path must be of a type compatible with leafref""",
                    "defined-type": "leafref",
                    "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="secondary-path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)""",
                }
            )

        self.__secondary_path = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_secondary_path(self):
        # Restore the leaf to a fresh, unset wrapper.
        self.__secondary_path = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="secondary-path",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=False,
        )

    def _get_priority(self):
        """
        Getter method for priority, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path/candidate_secondary_paths/candidate_secondary_path/state/priority (uint16)

        YANG Description: The priority of the specified secondary path option. Higher
        priority options are less preferable - such that a secondary
        path reference with a priority of 0 is the most preferred
        """
        return self.__priority

    def _set_priority(self, v, load=False):
        """
        Setter method for priority, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path/candidate_secondary_paths/candidate_secondary_path/state/priority (uint16)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_priority is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_priority() directly.

        YANG Description: The priority of the specified secondary path option. Higher
        priority options are less preferable - such that a secondary
        path reference with a priority of 0 is the most preferred
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=int, restriction_dict={"range": ["0..65535"]}, int_size=16
                ),
                is_leaf=True,
                yang_name="priority",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="uint16",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """priority must be of a type compatible with uint16""",
                    "defined-type": "uint16",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint16', is_config=False)""",
                }
            )

        self.__priority = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_priority(self):
        self.__priority = YANGDynClass(
            base=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..65535"]}, int_size=16
            ),
            is_leaf=True,
            yang_name="priority",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint16",
            is_config=False,
        )

    def _get_active(self):
        """
        Getter method for active, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path/candidate_secondary_paths/candidate_secondary_path/state/active (boolean)

        YANG Description: Indicates the current active path option that has
        been selected of the candidate secondary paths
        """
        return self.__active

    def _set_active(self, v, load=False):
        """
        Setter method for active, mapped from YANG variable /network_instances/network_instance/mpls/lsps/constrained_path/tunnels/tunnel/p2p_tunnel_attributes/p2p_primary_path/p2p_primary_path/candidate_secondary_paths/candidate_secondary_path/state/active (boolean)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_active is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_active() directly.

        YANG Description: Indicates the current active path option that has
        been selected of the candidate secondary paths
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=YANGBool,
                is_leaf=True,
                yang_name="active",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="boolean",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """active must be of a type compatible with boolean""",
                    "defined-type": "boolean",
                    "generated-type": """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
                }
            )

        self.__active = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_active(self):
        self.__active = YANGDynClass(
            base=YANGBool,
            is_leaf=True,
            yang_name="active",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )

    # Public read-only properties; writes go through the private _set_* methods.
    secondary_path = __builtin__.property(_get_secondary_path)
    priority = __builtin__.property(_get_priority)
    active = __builtin__.property(_get_active)

    _pyangbind_elements = OrderedDict(
        [("secondary_path", secondary_path), ("priority", priority), ("active", active)]
    )
| 42.587786
| 423
| 0.624628
| 3,038
| 27,895
| 5.495721
| 0.069124
| 0.059176
| 0.041926
| 0.048275
| 0.97754
| 0.964542
| 0.964542
| 0.964542
| 0.964542
| 0.964542
| 0
| 0.00831
| 0.283886
| 27,895
| 654
| 424
| 42.652905
| 0.827493
| 0.276573
| 0
| 0.876543
| 0
| 0.012346
| 0.249552
| 0.083317
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045267
| false
| 0
| 0.030864
| 0
| 0.12963
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
afd05c03f2100c0d78bbe26faa0306c5b137710e
| 13,112
|
py
|
Python
|
tests/commands/test__vi_j.py
|
uri/Vintageous
|
d5662872bcf1e7439875fe1c5133010db2ace8fd
|
[
"MIT"
] | null | null | null |
tests/commands/test__vi_j.py
|
uri/Vintageous
|
d5662872bcf1e7439875fe1c5133010db2ace8fd
|
[
"MIT"
] | null | null | null |
tests/commands/test__vi_j.py
|
uri/Vintageous
|
d5662872bcf1e7439875fe1c5133010db2ace8fd
|
[
"MIT"
] | null | null | null |
import unittest
from Vintageous.vi.constants import _MODE_INTERNAL_NORMAL
from Vintageous.vi.constants import MODE_NORMAL
from Vintageous.vi.constants import MODE_VISUAL
from Vintageous.vi.constants import MODE_VISUAL_LINE
from Vintageous.tests.commands import set_text
from Vintageous.tests.commands import add_selection
from Vintageous.tests.commands import get_sel
from Vintageous.tests.commands import first_sel
from Vintageous.tests.commands import BufferTest
# TODO: Test against folded regions.
# TODO: Ensure that we only create empty selections while testing. Add assert_all_sels_empty()?
class Test_vi_j_InNormalMode(BufferTest):
    """`_vi_j` in normal mode: the caret moves down and stays an empty selection."""

    def _press_j(self, text, start, count, xpos):
        # Load *text*, place an empty caret at offset *start*, then press `j`.
        set_text(self.view, text)
        add_selection(self.view, a=start, b=start)
        self.view.run_command('_vi_j', {'mode': MODE_NORMAL, 'count': count, 'xpos': xpos})

    def _assert_caret(self, pt):
        # The resulting first selection must be an empty region at *pt*.
        self.assertEqual(self.R(pt, pt), first_sel(self.view))

    def testMoveOne(self):
        self._press_j('abc\nabc\nabc', start=1, count=1, xpos=1)
        self._assert_caret(self.view.text_point(1, 1))

    def testMoveMany(self):
        self._press_j(''.join(('abc\n',) * 60), start=1, count=50, xpos=1)
        self._assert_caret(self.view.text_point(50, 1))

    def testMoveOntoLongerLine(self):
        self._press_j('foo\nfoo bar\nfoo bar', start=1, count=1, xpos=1)
        self._assert_caret(self.view.text_point(1, 1))

    def testMoveOntoShorterLine(self):
        self._press_j('foo bar\nfoo\nbar', start=5, count=1, xpos=5)
        # The destination line is shorter than xpos, so the caret lands on its
        # last character.
        eol = self.view.line(self.view.text_point(1, 0)).b - 1
        self._assert_caret(eol)

    def testMoveFromEmptyLine(self):
        self._press_j('\nfoo\nbar', start=0, count=1, xpos=0)
        self._assert_caret(self.view.text_point(1, 0))

    def testMoveFromEmptyLineToEmptyLine(self):
        self._press_j('\n\nbar', start=0, count=1, xpos=0)
        self._assert_caret(self.view.text_point(1, 0))

    def testMoveTooFar(self):
        self._press_j('foo\nbar\nbaz', start=1, count=10000, xpos=1)
        # Movement clamps at the last line of the buffer.
        self._assert_caret(self.view.text_point(2, 1))
class Test_vi_j_InVisualMode(BufferTest):
    """`_vi_j` in visual mode: the selection end extends (or flips) downward."""

    def _press_j(self, text, a, b, count, xpos):
        # Load *text*, select (a, b), then press `j` in visual mode.
        set_text(self.view, text)
        add_selection(self.view, a=a, b=b)
        self.view.run_command('_vi_j', {'mode': MODE_VISUAL, 'count': count, 'xpos': xpos})

    def _assert_sel(self, expected):
        self.assertEqual(expected, first_sel(self.view))

    def testMoveOne(self):
        self._press_j('abc\nabc\nabc', a=1, b=2, count=1, xpos=1)
        self._assert_sel(self.R(1, self.view.text_point(1, 2)))

    def testMoveReversedNoCrossOver(self):
        self._press_j('abc\nabc\nabc', a=10, b=1, count=1, xpos=1)
        self._assert_sel(self.R(10, self.view.text_point(1, 1)))

    # FIXME: This is wrong in the implementation.
    def testMoveReversedCrossOver(self):
        self._press_j('abc\nabc\nabc', a=6, b=1, count=2, xpos=1)
        self._assert_sel(self.R(5, self.view.text_point(2, 2)))

    # FIXME: This is wrong in the implementation.
    def testMoveReversedCrossOverTooFar(self):
        self._press_j('abc\nabc\nabc', a=6, b=1, count=100, xpos=1)
        self._assert_sel(self.R(5, self.view.text_point(2, 2)))

    def testMoveReversedBackToSameLine(self):
        self._press_j('abc\nabc\nabc', a=6, b=1, count=1, xpos=1)
        self._assert_sel(self.R(self.view.text_point(1, 1), 6))

    def testMoveReversedDownFromSameLine(self):
        self._press_j('abc\nabc\nabc', a=6, b=5, count=1, xpos=1)
        self._assert_sel(self.R(5, self.view.text_point(2, 2)))

    def testMoveMany(self):
        self._press_j(''.join(('abc\n',) * 60), a=1, b=2, count=50, xpos=1)
        self._assert_sel(self.R(1, self.view.text_point(50, 2)))

    def testMoveOntoLongerLine(self):
        self._press_j('foo\nfoo bar\nfoo bar', a=1, b=2, count=1, xpos=1)
        self._assert_sel(self.R(1, self.view.text_point(1, 2)))

    def testMoveOntoShorterLine(self):
        self._press_j('foo bar\nfoo\nbar', a=5, b=6, count=1, xpos=5)
        # Shorter destination line: the selection runs to the end of its full line.
        end = self.view.full_line(self.view.text_point(1, 0)).b
        self._assert_sel(self.R(5, end))

    def testMoveFromEmptyLine(self):
        self._press_j('\nfoo\nbar', a=0, b=1, count=1, xpos=0)
        self._assert_sel(self.R(0, self.view.text_point(1, 1)))

    def testMoveFromEmptyLineToEmptyLine(self):
        self._press_j('\n\nbar', a=0, b=1, count=1, xpos=0)
        self._assert_sel(self.R(0, self.view.text_point(1, 1)))

    def testMoveTooFar(self):
        self._press_j('foo\nbar\nbaz', a=1, b=2, count=10000, xpos=1)
        # Movement clamps at the last line of the buffer.
        self._assert_sel(self.R(1, self.view.text_point(2, 2)))
# TODO: Ensure that we only create empty selections while testing. Add assert_all_sels_empty()?
class Test_vi_j_InInternalNormalMode(BufferTest):
    """`_vi_j` in internal-normal mode: selection becomes linewise, from offset 0
    through the full destination line."""

    def _press_j(self, text, start, count, xpos):
        # Load *text*, place an empty caret at *start*, then press `j`.
        set_text(self.view, text)
        add_selection(self.view, a=start, b=start)
        self.view.run_command('_vi_j', {'mode': _MODE_INTERNAL_NORMAL, 'count': count, 'xpos': xpos})

    def _assert_linewise(self, row, col):
        # Expected region: buffer start through the end of the full line
        # containing (row, col), newline included.
        end = self.view.full_line(self.view.text_point(row, col)).b
        self.assertEqual(self.R(0, end), first_sel(self.view))

    def testMoveOne(self):
        self._press_j('abc\nabc\nabc', start=1, count=1, xpos=1)
        self._assert_linewise(1, 0)

    def testMoveMany(self):
        self._press_j(''.join(('abc\n',) * 60), start=1, count=50, xpos=1)
        self._assert_linewise(50, 2)

    def testMoveOntoLongerLine(self):
        self._press_j('foo\nfoo bar\nfoo bar', start=1, count=1, xpos=1)
        self._assert_linewise(1, 0)

    def testMoveOntoShorterLine(self):
        self._press_j('foo bar\nfoo\nbar', start=5, count=1, xpos=5)
        self._assert_linewise(1, 0)

    def testMoveFromEmptyLine(self):
        self._press_j('\nfoo\nbar', start=0, count=1, xpos=0)
        self._assert_linewise(1, 0)

    def testMoveFromEmptyLineToEmptyLine(self):
        self._press_j('\n\nbar', start=0, count=1, xpos=0)
        self._assert_linewise(1, 0)

    def testMoveTooFar(self):
        self._press_j('foo\nbar\nbaz', start=1, count=10000, xpos=1)
        # Movement clamps at the last line of the buffer.
        self._assert_linewise(2, 0)
class Test_vi_j_InVisualLineMode(BufferTest):
    """`_vi_j` in visual-line mode: the linewise selection grows downward."""

    def _press_j(self, text, a, b, count, xpos):
        # Load *text*, select (a, b), then press `j` in visual-line mode.
        set_text(self.view, text)
        add_selection(self.view, a=a, b=b)
        self.view.run_command('_vi_j', {'mode': MODE_VISUAL_LINE, 'count': count, 'xpos': xpos})

    def _assert_linewise(self, row):
        # Expected region: buffer start through the end of the full line at
        # *row*, newline included.
        end = self.view.full_line(self.view.text_point(row, 0)).b
        self.assertEqual(self.R(0, end), first_sel(self.view))

    def testMoveOne(self):
        self._press_j('abc\nabc\nabc', a=0, b=4, count=1, xpos=1)
        self._assert_linewise(1)

    def testMoveMany(self):
        self._press_j(''.join(('abc\n',) * 60), a=0, b=4, count=50, xpos=1)
        self._assert_linewise(50)

    def testMoveFromEmptyLine(self):
        self._press_j('\nfoo\nbar', a=0, b=1, count=1, xpos=0)
        self._assert_linewise(1)

    def testMoveFromEmptyLineToEmptyLine(self):
        self._press_j('\n\nbar', a=0, b=1, count=1, xpos=0)
        self._assert_linewise(1)

    def testMoveTooFar(self):
        self._press_j('foo\nbar\nbaz', a=0, b=4, count=10000, xpos=1)
        # Movement clamps at the last line of the buffer.
        self._assert_linewise(2)
| 34.414698
| 99
| 0.615238
| 1,804
| 13,112
| 4.300443
| 0.054324
| 0.174272
| 0.081207
| 0.059938
| 0.959139
| 0.946378
| 0.932586
| 0.915958
| 0.910544
| 0.910544
| 0
| 0.024772
| 0.239552
| 13,112
| 380
| 100
| 34.505263
| 0.753285
| 0.023642
| 0
| 0.828571
| 0
| 0
| 0.074823
| 0
| 0
| 0
| 0
| 0.002632
| 0.126531
| 1
| 0.126531
| false
| 0
| 0.040816
| 0
| 0.183673
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bb736b8253d1d933c908dbdcdf5d5b8c539bb5f7
| 5,024
|
py
|
Python
|
todo.py
|
Josue23/todoapp
|
4ac298b841d8ac49c5bccfa59e7e4e16fc034256
|
[
"MIT"
] | null | null | null |
todo.py
|
Josue23/todoapp
|
4ac298b841d8ac49c5bccfa59e7e4e16fc034256
|
[
"MIT"
] | null | null | null |
todo.py
|
Josue23/todoapp
|
4ac298b841d8ac49c5bccfa59e7e4e16fc034256
|
[
"MIT"
] | null | null | null |
from operator import itemgetter

from flask import Flask, abort, jsonify, request
# Flask application and the in-memory task store (no persistence across restarts).
app = Flask('TODO')
tarefas = []
@app.route('/task')
def listar():
    """Return every stored task as a JSON array."""
    return jsonify(tarefas)
@app.route('/task', methods=['POST'])
def criar():
    """Tutorial stub: POST handler that just returns an empty JSON response."""
    return jsonify()
@app.route('/task', methods=['POST'])
def criar():
    """Build a task from the JSON body and echo it (tutorial step: not yet stored)."""
    corpo = request.json
    tarefa = {
        'id': len(tarefas) + 1,
        'titulo': corpo.get('titulo'),
        'descricao': corpo.get('descricao'),
        'estado': False,
    }
    return jsonify(tarefa)
@app.route('/task', methods=['POST'])
def criar():
    """Build a task from the JSON body and reply 201 (tutorial step: not yet stored)."""
    corpo = request.json
    tarefa = {
        'id': len(tarefas) + 1,
        'titulo': corpo.get('titulo'),
        'descricao': corpo.get('descricao'),
        'estado': False,
    }
    return jsonify(tarefa), 201
@app.route('/task', methods=['POST'])
def criar():
    """Create a task from the JSON body, store it, and reply 201."""
    corpo = request.json
    nova = {
        'id': len(tarefas) + 1,
        'titulo': corpo.get('titulo'),
        'descricao': corpo.get('descricao'),
        'estado': False,
    }
    tarefas.append(nova)
    return jsonify(nova), 201
@app.route('/task', methods=['POST'])
def criar():
    """Create a task; reject a body without 'descricao' (tutorial uses 404 here)."""
    titulo = request.json.get('titulo')
    descricao = request.json.get('descricao')
    if not descricao:
        abort(404)
    nova = {
        'id': len(tarefas) + 1,
        'titulo': titulo,
        'descricao': descricao,
        'estado': False,
    }
    tarefas.append(nova)
    return jsonify(nova), 201
@app.route('/task', methods=['POST'])
def criar():
    """Create a task; reject a body missing 'titulo' or 'descricao' (tutorial uses 404)."""
    titulo = request.json.get('titulo')
    descricao = request.json.get('descricao')
    if not descricao or not titulo:
        abort(404)
    nova = {
        'id': len(tarefas) + 1,
        'titulo': titulo,
        'descricao': descricao,
        'estado': False,
    }
    tarefas.append(nova)
    return jsonify(nova), 201
@app.route('/task')
def listar():
    """List tasks as JSON, pending (estado=False) before completed ones."""
    ordenadas = sorted(tarefas, key=itemgetter('estado'))
    return jsonify(ordenadas)
# Removendo tarefas
@app.route('/task/<int:id_tarefa>', methods=['DELETE'])
def remover(id_tarefa):
    """Tutorial stub: DELETE handler that only returns an empty body."""
    return ''
@app.route('/task/<int:id_tarefa>', methods=['DELETE'])
def remover(id_tarefa):
    """Tutorial stub: DELETE handler returning 204 No Content without deleting."""
    return '', 204
@app.route('/task/<int:id_tarefa>', methods=['DELETE'])
def remover(id_tarefa):
    """Remove the task with the given id (tutorial step: no existence check yet)."""
    encontradas = [t for t in tarefas if t['id'] == id_tarefa]
    tarefas.remove(encontradas[0])
    return '', 204
@app.route('/task/<int:id_tarefa>', methods=['DELETE'])
def remover(id_tarefa):
    """Remove the task with the given id; 404 when no such task exists."""
    encontradas = [t for t in tarefas if t['id'] == id_tarefa]
    if not encontradas:
        abort(404)
    tarefas.remove(encontradas[0])
    return '', 204
# Detalhando tarefas
@app.route('/task/<int:id_tarefa>', methods=['GET'])
def detalhar(id_tarefa):
    """Return one task by id (tutorial step: no existence check yet)."""
    encontradas = [t for t in tarefas if t['id'] == id_tarefa]
    return jsonify(encontradas[0])
@app.route('/task/<int:id_tarefa>', methods=['GET'])
def detalhar(id_tarefa):
    """Return one task by id; 404 when no such task exists."""
    encontradas = [t for t in tarefas if t['id'] == id_tarefa]
    if not encontradas:
        abort(404)
    return jsonify(encontradas[0])
# Entregando tarefas
@app.route('/tarefa/<int:id_tarefa>', methods=['PUT'])
def atualizar(id_tarefa):
    """Update title/description/state of a task (tutorial step: no checks yet)."""
    encontradas = [t for t in tarefas if t['id'] == id_tarefa]
    titulo = request.json.get('titulo')
    descricao = request.json.get('descricao')
    estado = request.json.get('estado')
    alvo = encontradas[0]
    # falsy payload values leave the stored value untouched
    alvo['titulo'] = titulo or alvo['titulo']
    alvo['descricao'] = descricao or alvo['descricao']
    alvo['estado'] = estado or alvo['estado']
    return jsonify(alvo)
@app.route('/tarefa/<int:id_tarefa>', methods=['PUT'])
def atualizar(id_tarefa):
    """Update title/description/'entregue' of a task; 404 when the id is unknown."""
    encontradas = [t for t in tarefas if t['id'] == id_tarefa]
    titulo = request.json.get('titulo')
    descricao = request.json.get('descricao')
    entregue = request.json.get('entregue')
    if not encontradas:
        abort(404)
    alvo = encontradas[0]
    # falsy payload values leave the stored value untouched
    alvo['titulo'] = titulo or alvo['titulo']
    alvo['descricao'] = descricao or alvo['descricao']
    alvo['entregue'] = entregue or alvo['entregue']
    return jsonify(alvo)
@app.route('/tarefa/<int:id_tarefa>', methods=['PUT'])
def atualizar(id_tarefa):
    """Update a task: 404 for an unknown id, 400 for an incomplete payload.

    Expects JSON with 'titulo', 'descricao' and 'estado'; returns the updated task.
    """
    encontrada = [t for t in tarefas if t['id'] == id_tarefa]
    titulo = request.json.get('titulo')
    descricao = request.json.get('descricao')
    estado = request.json.get('estado')
    if not encontrada:
        abort(404)
    if not descricao or not titulo or estado is None:
        abort(400)
    tarefa_escolhida = encontrada[0]
    # titulo/descricao are guaranteed truthy here, so direct assignment equals
    # the old `x or old` form. For estado, `estado or old` was a bug: a payload
    # with estado=False was silently ignored, so a task could never be marked
    # not-done again. Assign directly (None was already rejected above).
    tarefa_escolhida['titulo'] = titulo
    tarefa_escolhida['descricao'] = descricao
    tarefa_escolhida['estado'] = estado
    return jsonify(tarefa_escolhida)
| 27.911111
| 78
| 0.645104
| 611
| 5,024
| 5.224223
| 0.091653
| 0.062657
| 0.083333
| 0.050752
| 0.905388
| 0.893484
| 0.866855
| 0.839599
| 0.835213
| 0.835213
| 0
| 0.013386
| 0.197054
| 5,024
| 179
| 79
| 28.067039
| 0.777888
| 0.010947
| 0
| 0.859155
| 0
| 0
| 0.142397
| 0.039275
| 0
| 0
| 0
| 0
| 0
| 1
| 0.119718
| false
| 0
| 0.014085
| 0.035211
| 0.253521
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bb7c53c319c3b4c2973274b5fcdd18bc449c740c
| 28,548
|
py
|
Python
|
adder.py
|
jorgebrandao/ShorAlgorithm
|
73187cb39380c2212e26a611d7cb2c2440625af8
|
[
"Apache-2.0"
] | null | null | null |
adder.py
|
jorgebrandao/ShorAlgorithm
|
73187cb39380c2212e26a611d7cb2c2440625af8
|
[
"Apache-2.0"
] | null | null | null |
adder.py
|
jorgebrandao/ShorAlgorithm
|
73187cb39380c2212e26a611d7cb2c2440625af8
|
[
"Apache-2.0"
] | null | null | null |
import sys
sys.path.append('qiskit-sdk-py-master')
from qiskit import QuantumProgram
from math import*
from qiskit.tools.visualization import plot_histogram
#####################COMO USAR AS DIFERENTES GATES######################
#.x(qr[0]) #applying x gate to the first qubit
#.y(qr[0]) #applying y gate to the first qubit
#.z(qr[0]) #applying z gate to the first qubit
#.iden(qr[0]) #identity gate on the first qubit
#.u1(lambd, qr[0]) #applying a u1 gate to the first qubit
#.u2(phi, lambd, qr[0]) #applying a u2 gate to the first qubit
#.u3(theta, phi, lambd, qr[0]) #applying a u3 gate to the first qubit
#.h(qr[0]) #applying h gate to the first qubit
#.s(qr[0]) #applying s gate to the first qubit
#.sdg(qr[0]) #applying sdg gate to the first qubit
#.t(qr[0]) #applying t gate to the first qubit
#.tdg(qr[0]) #applying tdg gate to the first qubit
#.rx(theta, qr[0]) #applying rotation around x-axis gate to the first qubit
#.ry(theta, qr[0]) #applying rotation around y-axis gate to the first qubit
#.rz(phi, qr[0]) #applying rotation around z-axis gate to the first qubit
#.cx(qr[0], qr[1]) #applying cnot gate (do 1 para o 0 (testar melhor...))
#.cy(qr[0], qr[1]) #controlled-y
#.cz(qr[0], qr[1]) #controlled-z
#.ch(qr[0], qr[1]) #controlled-h
#.crz(lambd, qr[0], qr[1]) #controlled rotation around-Z
#.cu1(lambd, qr[0], qr[1]) #controlled u1
#.cu3(theta, phi, lambd, qr[0], qr[1]) #controlled u3
#.swap(qr[0], qr[1]) #swapping the first and second qubits
#.ccx(qr[0], qr[1], qr[2]) #Toffoli gate
#.cswap(qr[0], qr[1], qr[2]) #swapping the second and third qubits controlled by the first qubit
#############################################################
######### Three qubit Quantum Fourier Transform #############
#############################################################
def qft3(g):
    """Three-qubit QFT demo: run on the local simulator with *g* shots and plot counts."""
    Q_program = QuantumProgram()
    qr = Q_program.create_quantum_register("qr", 3)
    cr = Q_program.create_classical_register("cr", 3)
    qc = Q_program.create_circuit("superposition", [qr], [cr])
    # Inputs
    #qc.h(qr[1])
    qc.h(qr[2])
    # QFT 3
    qc.h(qr[2]) # apply H to qubit 1
    # controlled-S from 0 to 1 (built from T/Tdg and CNOTs)
    qc.t(qr[2])
    qc.cx(qr[1], qr[2])
    qc.t(qr[1])
    qc.tdg(qr[2])
    qc.cx(qr[1], qr[2])
    # controlled-T from 0 to 2
    qc.u1(pi/8, qr[2]) #u1=sqrt(T)
    qc.cx(qr[0], qr[2])
    qc.u1(pi/8, qr[0])
    qc.u1(-pi/8, qr[2])
    qc.cx(qr[0], qr[2])
    qc.h(qr[1]) # apply H to qubit 2
    # controlled-S from 1 to 2
    qc.t(qr[1])
    qc.cx(qr[0], qr[1])
    qc.t(qr[0])
    qc.tdg(qr[1])
    qc.cx(qr[0], qr[1])
    qc.h(qr[0]) # apply H to qubit 3
    qc.swap(qr[2], qr[0]) # swap qubits 1 and 3
    qc.measure(qr, cr)
    result = Q_program.execute(["superposition"], backend='local_qasm_simulator', shots=g)
    print(result)
    print(result.get_data("superposition"))
    tmp = result.get_data("superposition")
    plot_histogram(tmp['counts'])
#####
#END#
########################################################################
#############################################################
######### Three qubit Quantum Fourier Transform #############
#############################################################
def qft3u(g):
    """Three-qubit QFT using u1-based controlled phases; runs *g* shots and plots counts."""
    Q_program = QuantumProgram()
    qr = Q_program.create_quantum_register("qr", 3)
    cr = Q_program.create_classical_register("cr", 3)
    qc = Q_program.create_circuit("superposition", [qr], [cr])
    # Inputs
    #qc.h(qr[1])
    qc.h(qr[0])
    #qc.x(qr[0])
    #qc.x(qr[1])
    #qc.x(qr[2])
    # QFT 3
    qc.h(qr[2]) # apply H to qubit 1
    # controlled-S from 2 to 1
    qc.u1(pi/4, qr[2])
    qc.cx(qr[1], qr[2])
    qc.u1(pi/4, qr[1])
    qc.u1(-pi/4, qr[2])
    qc.cx(qr[1], qr[2])
    # controlled-T from 0 to 2
    qc.u1(pi/8, qr[2]) #u1=sqrt(T)
    qc.cx(qr[0], qr[2])
    qc.u1(pi/8, qr[0])
    qc.u1(-pi/8, qr[2])
    qc.cx(qr[0], qr[2])
    qc.h(qr[1]) # apply H to qubit 2
    # controlled-S from 1 to 2
    qc.u1(pi/4, qr[1])
    qc.cx(qr[0], qr[1])
    qc.u1(pi/4, qr[0])
    qc.u1(-pi/4, qr[1])
    qc.cx(qr[0], qr[1])
    qc.h(qr[0]) # apply H to qubit 3
    qc.swap(qr[2], qr[0]) # swap qubits 1 and 3
    qc.measure(qr, cr)
    result = Q_program.execute(["superposition"], backend='local_qasm_simulator', shots=g)
    print(result)
    print(result.get_data("superposition"))
    tmp = result.get_data("superposition")
    plot_histogram(tmp['counts'])
#####
#END#
########################################################################
#############################################################
###### Three qubit Inverse Quantum Fourier Transform ########
#############################################################
def invqft3(g):
    """Three-qubit inverse QFT; runs *g* shots on the local simulator and plots counts."""
    Q_program = QuantumProgram()
    qr = Q_program.create_quantum_register("qr", 3)
    cr = Q_program.create_classical_register("cr", 3)
    qc = Q_program.create_circuit("superposition", [qr], [cr])
    # inputs
    qc.h(qr[1])
    qc.h(qr[2])
    qc.swap(qr[2], qr[0]) # swap qubits 1 and 3
    qc.h(qr[0])
    # inverse controlled-S (conjugate of the T-based block used in qft3)
    qc.tdg(qr[1])
    qc.cx(qr[0], qr[1])
    qc.tdg(qr[0])
    qc.t(qr[1])
    qc.cx(qr[0], qr[1])
    qc.h(qr[1])
    # inverse controlled-T
    qc.u1(-pi/8, qr[2])
    qc.cx(qr[0], qr[2])
    qc.u1(-pi/8, qr[0])
    qc.u1(pi/8, qr[2])
    qc.cx(qr[0], qr[2])
    # inverse controlled-S
    qc.tdg(qr[2])
    qc.cx(qr[1], qr[2])
    qc.tdg(qr[1])
    qc.t(qr[2])
    qc.cx(qr[1], qr[2])
    qc.h(qr[2])
    qc.measure(qr, cr)
    result = Q_program.execute(["superposition"], backend='local_qasm_simulator', shots=g)
    print(result)
    print(result.get_data("superposition"))
    tmp = result.get_data("superposition")
    plot_histogram(tmp['counts'])
#####
#END#
########################################################################
#############################################################
######### Four qubit Quantum Fourier Transform ##############
#############################################################
def qft4(g):
    """Four-qubit QFT; runs *g* shots on the local simulator and plots counts."""
    Q_program = QuantumProgram()
    qr = Q_program.create_quantum_register("qr", 4)
    cr = Q_program.create_classical_register("cr", 4)
    qc = Q_program.create_circuit("superposition", [qr], [cr])
    # Inputs
    #qc.h(qr[0])
    qc.h(qr[1])
    #qc.h(qr[2])
    qc.h(qr[3])
    # QFT on 4 qubits
    qc.h(qr[3])
    # controlled-S
    qc.t(qr[3])
    qc.cx(qr[2], qr[3])
    qc.t(qr[2])
    qc.tdg(qr[3])
    qc.cx(qr[2], qr[3])
    # controlled-T
    qc.u1(pi/8, qr[3]) #u1=sqrt(T)
    qc.cx(qr[1], qr[3])
    qc.u1(pi/8, qr[1])
    qc.u1(-pi/8, qr[3])
    qc.cx(qr[1], qr[3])
    # controlled-u1(pi/16)
    qc.u1(pi/16, qr[3]) #sqrt(sqrt(T))
    qc.cx(qr[0], qr[3])
    qc.u1(pi/16, qr[0])
    qc.u1(-pi/16, qr[3])
    qc.cx(qr[0], qr[3])
    qc.h(qr[2])
    # controlled-S
    qc.t(qr[2])
    qc.cx(qr[1], qr[2])
    qc.t(qr[1])
    qc.tdg(qr[2])
    qc.cx(qr[1], qr[2])
    # controlled-T
    qc.u1(pi/8, qr[2]) #u1=sqrt(T)
    qc.cx(qr[0], qr[2])
    qc.u1(pi/8, qr[0])
    qc.u1(-pi/8, qr[2])
    qc.cx(qr[0], qr[2])
    qc.h(qr[1])
    # controlled-S
    qc.t(qr[1])
    qc.cx(qr[0], qr[1])
    qc.t(qr[0])
    qc.tdg(qr[1])
    qc.cx(qr[0], qr[1])
    qc.h(qr[0])
    # final qubit-order swaps
    qc.swap(qr[2], qr[1])
    qc.swap(qr[3], qr[0])
    qc.measure(qr, cr)
    result = Q_program.execute(["superposition"], backend='local_qasm_simulator', shots=g)
    print(result)
    print(result.get_data("superposition"))
    tmp = result.get_data("superposition")
    plot_histogram(tmp['counts'])
#####
#END#
########################################################################
#############################################################
########## Five qubit Quantum Fourier Transform #############
#############################################################
def qft5(g): # verified - used s and t
    """Five-qubit QFT; runs *g* shots on the local simulator and plots counts."""
    Q_program = QuantumProgram()
    qr = Q_program.create_quantum_register("qr", 5)
    cr = Q_program.create_classical_register("cr", 5)
    qc = Q_program.create_circuit("superposition", [qr], [cr])
    # Inputs
    #qc.h(qr[0])
    qc.h(qr[1])
    qc.h(qr[2])
    qc.h(qr[3])
    qc.h(qr[4])
    # QFT on 5 qubits
    qc.h(qr[4])
    # controlled-S
    qc.t(qr[4])
    qc.cx(qr[3], qr[4])
    qc.t(qr[3])
    qc.tdg(qr[4])
    qc.cx(qr[3], qr[4])
    # controlled-T
    qc.u1(pi/8, qr[4]) #u1=sqrt(T)
    qc.cx(qr[2], qr[4])
    qc.u1(pi/8, qr[2])
    qc.u1(-pi/8, qr[4])
    qc.cx(qr[2], qr[4])
    # controlled-sqrt(T)
    qc.u1(pi/16, qr[4]) #sqrt(sqrt(T))
    qc.cx(qr[1], qr[4])
    qc.u1(pi/16, qr[1])
    qc.u1(-pi/16, qr[4])
    qc.cx(qr[1], qr[4])
    # controlled-T**(1/4)
    qc.u1(pi/32, qr[4]) #sqrt(sqrt(T))
    qc.cx(qr[0], qr[4])
    qc.u1(pi/32, qr[0])
    qc.u1(-pi/32, qr[4])
    qc.cx(qr[0], qr[4])
    qc.h(qr[3])
    # controlled-S
    qc.t(qr[3])
    qc.cx(qr[2], qr[3])
    qc.t(qr[2])
    qc.tdg(qr[3])
    qc.cx(qr[2], qr[3])
    # controlled-T
    qc.u1(pi/8, qr[3]) #u1=sqrt(T)
    qc.cx(qr[1], qr[3])
    qc.u1(pi/8, qr[1])
    qc.u1(-pi/8, qr[3])
    qc.cx(qr[1], qr[3])
    # controlled-sqrt(T)
    qc.u1(pi/16, qr[3]) #sqrt(sqrt(T))
    qc.cx(qr[0], qr[3])
    qc.u1(pi/16, qr[0])
    qc.u1(-pi/16, qr[3])
    qc.cx(qr[0], qr[3])
    qc.h(qr[2])
    # controlled-S
    qc.t(qr[2])
    qc.cx(qr[1], qr[2])
    qc.t(qr[1])
    qc.tdg(qr[2])
    qc.cx(qr[1], qr[2])
    # controlled-T
    qc.u1(pi/8, qr[2]) #u1=sqrt(T)
    qc.cx(qr[0], qr[2])
    qc.u1(pi/8, qr[0])
    qc.u1(-pi/8, qr[2])
    qc.cx(qr[0], qr[2])
    qc.h(qr[1])
    # controlled-S
    qc.t(qr[1])
    qc.cx(qr[0], qr[1])
    qc.t(qr[0])
    qc.tdg(qr[1])
    qc.cx(qr[0], qr[1])
    qc.h(qr[0])
    # final qubit-order swaps
    qc.swap(qr[4], qr[0])
    qc.swap(qr[3], qr[1])
    qc.measure(qr, cr)
    result = Q_program.execute(["superposition"], backend='local_qasm_simulator', shots=g)
    print(result)
    print(result.get_data("superposition"))
    tmp = result.get_data("superposition")
    plot_histogram(tmp['counts'])
#####
#END#
########################################################################
#############################################################
###### Four qubit Inverse Quantum Fourier Transform #########
#############################################################
def invqft4(g):
    """Four-qubit inverse QFT; runs *g* shots on the local simulator and plots counts."""
    Q_program = QuantumProgram()
    qr = Q_program.create_quantum_register("qr", 4)
    cr = Q_program.create_classical_register("cr", 4)
    qc = Q_program.create_circuit("superposition", [qr], [cr])
    # Inputs
    #qc.h(qr[0])
    #qc.h(qr[1])
    #qc.h(qr[2])
    qc.h(qr[3])
    qc.swap(qr[3], qr[0])
    qc.swap(qr[2], qr[1])
    qc.h(qr[0])
    # inverse controlled-S
    qc.tdg(qr[1])
    qc.cx(qr[0], qr[1])
    qc.tdg(qr[0])
    qc.t(qr[1])
    qc.cx(qr[0], qr[1])
    qc.h(qr[1])
    # inverse controlled-T
    qc.u1(-pi/8, qr[2]) #u1=sqrt(T)
    qc.cx(qr[0], qr[2])
    qc.u1(-pi/8, qr[0])
    qc.u1(pi/8, qr[2])
    qc.cx(qr[0], qr[2])
    # inverse controlled-S
    qc.tdg(qr[2])
    qc.cx(qr[1], qr[2])
    qc.tdg(qr[1])
    qc.t(qr[2])
    qc.cx(qr[1], qr[2])
    qc.h(qr[2])
    # inverse controlled-u1(pi/16)
    qc.u1(-pi/16, qr[3]) #sqrt(sqrt(T))
    qc.cx(qr[0], qr[3])
    qc.u1(-pi/16, qr[0])
    qc.u1(pi/16, qr[3])
    qc.cx(qr[0], qr[3])
    # inverse controlled-T
    qc.u1(-pi/8, qr[3]) #u1=sqrt(T)
    qc.cx(qr[1], qr[3])
    qc.u1(-pi/8, qr[1])
    qc.u1(pi/8, qr[3])
    qc.cx(qr[1], qr[3])
    # inverse controlled-S
    qc.tdg(qr[3])
    qc.cx(qr[2], qr[3])
    qc.tdg(qr[2])
    qc.t(qr[3])
    qc.cx(qr[2], qr[3])
    qc.h(qr[3])
    qc.measure(qr, cr)
    result = Q_program.execute(["superposition"], backend='local_qasm_simulator', shots=g)
    print(result)
    print(result.get_data("superposition"))
    tmp = result.get_data("superposition")
    plot_histogram(tmp['counts'])
#####
#END#
########################################################################
######################### Draper Adder 3+3 #############################
########################################################################
def adder33(g):
    """Draper adder for two 3-bit inputs: QFT of b (qr[3..6]), controlled phase
    additions from a (qr[0..2]), inverse QFT, then measure; *g* shots.

    Bug fix: the original body used Q_program without ever creating it, so
    calling adder33 raised NameError. Every sibling function in this file
    instantiates QuantumProgram first; this one now does too.
    """
    Q_program = QuantumProgram()
    qr = Q_program.create_quantum_register("qr", 7)
    cr = Q_program.create_classical_register("cr", 7)
    qc = Q_program.create_circuit("superposition", [qr], [cr])
    # Inputs
    # a
    qc.x(qr[0])
    qc.x(qr[1])
    qc.x(qr[2])
    # b
    #qc.x(qr[3])
    #qc.x(qr[4])
    #qc.x(qr[5])
    #### QFT-4 of b
    qc.h(qr[6])
    # controlled-S
    qc.t(qr[6])
    qc.cx(qr[5], qr[6])
    qc.t(qr[5])
    qc.tdg(qr[6])
    qc.cx(qr[5], qr[6])
    # controlled-T
    qc.u1(pi/8, qr[6]) #u1=sqrt(T)
    qc.cx(qr[4], qr[6])
    qc.u1(pi/8, qr[4])
    qc.u1(-pi/8, qr[6])
    qc.cx(qr[4], qr[6])
    # controlled-u1(pi/16)
    qc.u1(pi/16, qr[6]) #sqrt(sqrt(T))
    qc.cx(qr[3], qr[6])
    qc.u1(pi/16, qr[3])
    qc.u1(-pi/16, qr[6])
    qc.cx(qr[3], qr[6])
    qc.h(qr[5]) # apply H to qubit 1
    # controlled-S
    qc.u1(pi/4, qr[5])
    qc.cx(qr[4], qr[5])
    qc.u1(pi/4, qr[4])
    qc.u1(-pi/4, qr[5])
    qc.cx(qr[4], qr[5])
    # controlled-T
    qc.u1(pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.u1(pi/8, qr[3])
    qc.u1(-pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.h(qr[4])
    # controlled-S
    qc.u1(pi/4, qr[4])
    qc.cx(qr[3], qr[4])
    qc.u1(pi/4, qr[3])
    qc.u1(-pi/4, qr[4])
    qc.cx(qr[3], qr[4])
    qc.h(qr[3])
    qc.swap(qr[6], qr[3])
    qc.swap(qr[5], qr[4])
    # ADDER: controlled phase additions of a onto the transformed b
    # controlled-u2
    qc.u1(pi/4, qr[3])
    qc.cx(qr[2], qr[3])
    qc.u1(pi/4, qr[2])
    qc.u1(-pi/4, qr[3])
    qc.cx(qr[2], qr[3])
    # controlled-u3
    qc.u1(pi/8, qr[3])
    qc.cx(qr[1], qr[3])
    qc.u1(pi/8, qr[1])
    qc.u1(-pi/8, qr[3])
    qc.cx(qr[1], qr[3])
    # controlled-u4
    qc.u1(pi/16, qr[3])
    qc.cx(qr[0], qr[3])
    qc.u1(pi/16, qr[0])
    qc.u1(-pi/16, qr[3])
    qc.cx(qr[0], qr[3])
    # controlled-u1
    qc.u1(pi/2, qr[4])
    qc.cx(qr[2], qr[4])
    qc.u1(pi/2, qr[2])
    qc.u1(-pi/2, qr[4])
    qc.cx(qr[2], qr[4])
    # controlled-u2
    qc.u1(pi/4, qr[4])
    qc.cx(qr[1], qr[4])
    qc.u1(pi/4, qr[1])
    qc.u1(-pi/4, qr[4])
    qc.cx(qr[1], qr[4])
    # controlled-u3
    qc.u1(pi/8, qr[4])
    qc.cx(qr[0], qr[4])
    qc.u1(pi/8, qr[0])
    qc.u1(-pi/8, qr[4])
    qc.cx(qr[0], qr[4])
    # controlled-u1
    qc.u1(pi/2, qr[5])
    qc.cx(qr[1], qr[5])
    qc.u1(pi/2, qr[1])
    qc.u1(-pi/2, qr[5])
    qc.cx(qr[1], qr[5])
    # controlled-u2
    qc.u1(pi/4, qr[5])
    qc.cx(qr[0], qr[5])
    qc.u1(pi/4, qr[0])
    qc.u1(-pi/4, qr[5])
    qc.cx(qr[0], qr[5])
    # controlled-u1
    qc.u1(pi/2, qr[6])
    qc.cx(qr[0], qr[6])
    qc.u1(pi/2, qr[0])
    qc.u1(-pi/2, qr[6])
    qc.cx(qr[0], qr[6])
    # inverse QFT-4
    qc.swap(qr[6], qr[3])
    qc.swap(qr[5], qr[4])
    qc.h(qr[3])
    qc.tdg(qr[4])
    qc.cx(qr[3], qr[4])
    qc.tdg(qr[3])
    qc.t(qr[4])
    qc.cx(qr[3], qr[4])
    qc.h(qr[4])
    qc.u1(-pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.u1(-pi/8, qr[3])
    qc.u1(pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.tdg(qr[5])
    qc.cx(qr[4], qr[5])
    qc.tdg(qr[4])
    qc.t(qr[5])
    qc.cx(qr[4], qr[5])
    qc.h(qr[5])
    # inverse controlled-u1(pi/16)
    qc.u1(-pi/16, qr[6]) #sqrt(sqrt(T))
    qc.cx(qr[3], qr[6])
    qc.u1(-pi/16, qr[3])
    qc.u1(pi/16, qr[6])
    qc.cx(qr[3], qr[6])
    # inverse controlled-T
    qc.u1(-pi/8, qr[6]) #u1=sqrt(T)
    qc.cx(qr[4], qr[6])
    qc.u1(-pi/8, qr[4])
    qc.u1(pi/8, qr[6])
    qc.cx(qr[4], qr[6])
    # inverse controlled-S
    qc.tdg(qr[6])
    qc.cx(qr[5], qr[6])
    qc.tdg(qr[5])
    qc.t(qr[6])
    qc.cx(qr[5], qr[6])
    qc.h(qr[6])
    qc.measure(qr, cr)
    result = Q_program.execute(["superposition"], backend='local_qasm_simulator', shots=g)
    print(result)
    print(result.get_data("superposition"))
    tmp = result.get_data("superposition")
    plot_histogram(tmp['counts'])
#####
#END#
########################################################################
###################### Draper Adder 3+3 mod n ##########################
########################################################################
def add33(g):
    """Draper adder with a classically fixed addend a=101b=5: QFT of b,
    unconditional phase rotations encoding a, inverse QFT, measure; *g* shots."""
    Q_program = QuantumProgram()
    qr = Q_program.create_quantum_register("qr", 7)
    cr = Q_program.create_classical_register("cr", 7)
    qc = Q_program.create_circuit("superposition", [qr], [cr])
    # Inputs
    # x
    #qc.x(qr[0])
    #qc.x(qr[1])
    #qc.x(qr[2])
    # b
    qc.x(qr[3])
    #qc.x(qr[4])
    #qc.x(qr[5])
    #### QFT-4 of b
    qc.h(qr[6])
    # controlled-S
    qc.t(qr[6])
    qc.cx(qr[5], qr[6])
    qc.t(qr[5])
    qc.tdg(qr[6])
    qc.cx(qr[5], qr[6])
    # controlled-T
    qc.u1(pi/8, qr[6]) #u1=sqrt(T)
    qc.cx(qr[4], qr[6])
    qc.u1(pi/8, qr[4])
    qc.u1(-pi/8, qr[6])
    qc.cx(qr[4], qr[6])
    # controlled-u1(pi/16)
    qc.u1(pi/16, qr[6]) #sqrt(sqrt(T))
    qc.cx(qr[3], qr[6])
    qc.u1(pi/16, qr[3])
    qc.u1(-pi/16, qr[6])
    qc.cx(qr[3], qr[6])
    qc.h(qr[5]) # apply H to qubit 1
    # controlled-S
    qc.u1(pi/4, qr[5])
    qc.cx(qr[4], qr[5])
    qc.u1(pi/4, qr[4])
    qc.u1(-pi/4, qr[5])
    qc.cx(qr[4], qr[5])
    # controlled-T
    qc.u1(pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.u1(pi/8, qr[3])
    qc.u1(-pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.h(qr[4])
    # controlled-S
    qc.u1(pi/4, qr[4])
    qc.cx(qr[3], qr[4])
    qc.u1(pi/4, qr[3])
    qc.u1(-pi/4, qr[4])
    qc.cx(qr[3], qr[4])
    qc.h(qr[3])
    qc.swap(qr[6], qr[3])
    qc.swap(qr[5], qr[4])
    # ADDER (from the least significant qubit of the transform to the most significant of a)
    # assume a=101 = 5
    #u2+u4
    #qc.u1(pi/2, qr[3])
    #qc.u1(pi/8, qr[3])
    qc.u1(5*pi/8, qr[3]) # same as the two lines above (pi/2+pi/8=5*pi/8)
    #u1+u3
    #qc.u1(pi, qr[4])
    #qc.u1(pi/4, qr[4])
    qc.u1(5*pi/4, qr[4])# same as the two lines above (pi+pi/4=5*pi/4)
    #u2
    qc.u1(pi/2, qr[5])
    #u1
    qc.u1(pi, qr[6])
    # note: only the plain adder worked with phases; a controlled adder is still needed
    # cmult: the goal is to multiply a=101 by x
    # inverse QFT-4
    qc.swap(qr[6], qr[3])
    qc.swap(qr[5], qr[4])
    qc.h(qr[3])
    qc.tdg(qr[4])
    qc.cx(qr[3], qr[4])
    qc.tdg(qr[3])
    qc.t(qr[4])
    qc.cx(qr[3], qr[4])
    qc.h(qr[4])
    qc.u1(-pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.u1(-pi/8, qr[3])
    qc.u1(pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.tdg(qr[5])
    qc.cx(qr[4], qr[5])
    qc.tdg(qr[4])
    qc.t(qr[5])
    qc.cx(qr[4], qr[5])
    qc.h(qr[5])
    # inverse controlled-u1(pi/16)
    qc.u1(-pi/16, qr[6]) #sqrt(sqrt(T))
    qc.cx(qr[3], qr[6])
    qc.u1(-pi/16, qr[3])
    qc.u1(pi/16, qr[6])
    qc.cx(qr[3], qr[6])
    # inverse controlled-T
    qc.u1(-pi/8, qr[6]) #u1=sqrt(T)
    qc.cx(qr[4], qr[6])
    qc.u1(-pi/8, qr[4])
    qc.u1(pi/8, qr[6])
    qc.cx(qr[4], qr[6])
    # inverse controlled-S
    qc.tdg(qr[6])
    qc.cx(qr[5], qr[6])
    qc.tdg(qr[5])
    qc.t(qr[6])
    qc.cx(qr[5], qr[6])
    qc.h(qr[6])
    qc.measure(qr, cr)
    result = Q_program.execute(["superposition"], backend='local_qasm_simulator', shots=g)
    print(result)
    print(result.get_data("superposition"))
    #tmp = result.get_data("superposition")
    #plot_histogram(tmp['counts'])
#####
#END#
def addmod(g):
    """Modular Draper adder sketch (b + a mod n, with a=5 and n=3 hard-coded as
    phases): QFT, add a, subtract n, inverse QFT, copy the sign qubit to qr[7],
    then conditionally re-add n, subtract/re-add a, and restore; *g* shots."""
    Q_program = QuantumProgram()
    qr = Q_program.create_quantum_register("qr", 8)
    cr = Q_program.create_classical_register("cr", 8)
    qc = Q_program.create_circuit("superposition", [qr], [cr])
    # Inputs
    # n
    #qc.x(qr[0])
    #qc.x(qr[1])
    #qc.x(qr[2])
    # b
    qc.x(qr[3])
    #qc.x(qr[4])
    qc.x(qr[5])
    #### QFT-4 of b
    qc.h(qr[6])
    # controlled-S
    qc.t(qr[6])
    qc.cx(qr[5], qr[6])
    qc.t(qr[5])
    qc.tdg(qr[6])
    qc.cx(qr[5], qr[6])
    # controlled-T
    qc.u1(pi/8, qr[6]) #u1=sqrt(T)
    qc.cx(qr[4], qr[6])
    qc.u1(pi/8, qr[4])
    qc.u1(-pi/8, qr[6])
    qc.cx(qr[4], qr[6])
    # controlled-u1(pi/16)
    qc.u1(pi/16, qr[6]) #sqrt(sqrt(T))
    qc.cx(qr[3], qr[6])
    qc.u1(pi/16, qr[3])
    qc.u1(-pi/16, qr[6])
    qc.cx(qr[3], qr[6])
    qc.h(qr[5]) # apply H to qubit 1
    # controlled-S
    qc.u1(pi/4, qr[5])
    qc.cx(qr[4], qr[5])
    qc.u1(pi/4, qr[4])
    qc.u1(-pi/4, qr[5])
    qc.cx(qr[4], qr[5])
    # controlled-T
    qc.u1(pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.u1(pi/8, qr[3])
    qc.u1(-pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.h(qr[4])
    # controlled-S
    qc.u1(pi/4, qr[4])
    qc.cx(qr[3], qr[4])
    qc.u1(pi/4, qr[3])
    qc.u1(-pi/4, qr[4])
    qc.cx(qr[3], qr[4])
    qc.h(qr[3])
    qc.swap(qr[6], qr[3])
    qc.swap(qr[5], qr[4])
    # add a
    # ADDER (from the least significant qubit of the transform to the most significant of a)
    # assume a=101 = 5
    #u2+u4
    #qc.u1(pi/2, qr[3])
    #qc.u1(pi/8, qr[3])
    qc.u1(5*pi/8, qr[3]) # same as the two lines above (pi/2+pi/8=5*pi/8)
    #u1+u3
    #qc.u1(pi, qr[4])
    #qc.u1(pi/4, qr[4])
    qc.u1(5*pi/4, qr[4])# same as the two lines above (pi+pi/4=5*pi/4)
    #u2
    qc.u1(pi/2, qr[5])
    #u1
    qc.u1(pi, qr[6])
    # subtract n
    # assume n=011 = 3, so subtracting n means applying the conjugate phases
    #u3+u4
    #qc.u1(pi/4, qr[3])
    #qc.u1(pi/8, qr[3])
    qc.u1(-3*pi/8, qr[3])
    #u2+u3
    #qc.u1(pi/2, qr[4])
    #qc.u1(pi/4, qr[4])
    qc.u1(-3*pi/4, qr[4])# same as the two lines above (pi+pi/4=5*pi/4)
    #u1+u2
    qc.u1(-3*pi/2, qr[5])
    #u1
    qc.u1(-pi, qr[6])
    # inverse QFT-4
    qc.swap(qr[6], qr[3])
    qc.swap(qr[5], qr[4])
    qc.h(qr[3])
    qc.tdg(qr[4])
    qc.cx(qr[3], qr[4])
    qc.tdg(qr[3])
    qc.t(qr[4])
    qc.cx(qr[3], qr[4])
    qc.h(qr[4])
    qc.u1(-pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.u1(-pi/8, qr[3])
    qc.u1(pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.tdg(qr[5])
    qc.cx(qr[4], qr[5])
    qc.tdg(qr[4])
    qc.t(qr[5])
    qc.cx(qr[4], qr[5])
    qc.h(qr[5])
    # inverse controlled-u1(pi/16)
    qc.u1(-pi/16, qr[6]) #sqrt(sqrt(T))
    qc.cx(qr[3], qr[6])
    qc.u1(-pi/16, qr[3])
    qc.u1(pi/16, qr[6])
    qc.cx(qr[3], qr[6])
    # inverse controlled-T
    qc.u1(-pi/8, qr[6]) #u1=sqrt(T)
    qc.cx(qr[4], qr[6])
    qc.u1(-pi/8, qr[4])
    qc.u1(pi/8, qr[6])
    qc.cx(qr[4], qr[6])
    # inverse controlled-S
    qc.tdg(qr[6])
    qc.cx(qr[5], qr[6])
    qc.tdg(qr[5])
    qc.t(qr[6])
    qc.cx(qr[5], qr[6])
    qc.h(qr[6])
    # CNOT from the last QFT qubit onto a |0> ancilla (records the overflow/sign)
    qc.cx(qr[6], qr[7])
    # QFT again
    qc.h(qr[6])
    # controlled-S
    qc.t(qr[6])
    qc.cx(qr[5], qr[6])
    qc.t(qr[5])
    qc.tdg(qr[6])
    qc.cx(qr[5], qr[6])
    # controlled-T
    qc.u1(pi/8, qr[6]) #u1=sqrt(T)
    qc.cx(qr[4], qr[6])
    qc.u1(pi/8, qr[4])
    qc.u1(-pi/8, qr[6])
    qc.cx(qr[4], qr[6])
    # controlled-u1(pi/16)
    qc.u1(pi/16, qr[6]) #sqrt(sqrt(T))
    qc.cx(qr[3], qr[6])
    qc.u1(pi/16, qr[3])
    qc.u1(-pi/16, qr[6])
    qc.cx(qr[3], qr[6])
    qc.h(qr[5]) # apply H to qubit 1
    # controlled-S
    qc.u1(pi/4, qr[5])
    qc.cx(qr[4], qr[5])
    qc.u1(pi/4, qr[4])
    qc.u1(-pi/4, qr[5])
    qc.cx(qr[4], qr[5])
    # controlled-T
    qc.u1(pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.u1(pi/8, qr[3])
    qc.u1(-pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.h(qr[4])
    # controlled-S
    qc.u1(pi/4, qr[4])
    qc.cx(qr[3], qr[4])
    qc.u1(pi/4, qr[3])
    qc.u1(-pi/4, qr[4])
    qc.cx(qr[3], qr[4])
    qc.h(qr[3])
    qc.swap(qr[6], qr[3])
    qc.swap(qr[5], qr[4])
    # add n controlled by qubit 7
    qc.u1(3*pi/8, qr[3])
    qc.cx(qr[7], qr[3])
    qc.u1(3*pi/8, qr[7])
    qc.u1(-3*pi/8, qr[3])
    qc.cx(qr[7], qr[3])
    qc.u1(3*pi/4, qr[4])
    qc.cx(qr[7], qr[4])
    qc.u1(3*pi/4, qr[7])
    qc.u1(-3*pi/4, qr[4])
    qc.cx(qr[7], qr[4])
    qc.u1(3*pi/2, qr[5])
    qc.cx(qr[7], qr[5])
    qc.u1(3*pi/2, qr[7])
    qc.u1(-3*pi/2, qr[5])
    qc.cx(qr[7], qr[5])
    qc.u1(pi, qr[6])
    qc.cx(qr[7], qr[6])
    qc.u1(pi, qr[7])
    qc.u1(-pi, qr[6])
    qc.cx(qr[7], qr[6])
    # subtract a
    qc.u1(-5*pi/8, qr[3])
    qc.u1(-5*pi/4, qr[4])
    qc.u1(-pi/2, qr[5])
    qc.u1(-pi, qr[6])
    # inverse QFT
    qc.swap(qr[6], qr[3])
    qc.swap(qr[5], qr[4])
    qc.h(qr[3])
    qc.tdg(qr[4])
    qc.cx(qr[3], qr[4])
    qc.tdg(qr[3])
    qc.t(qr[4])
    qc.cx(qr[3], qr[4])
    qc.h(qr[4])
    qc.u1(-pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.u1(-pi/8, qr[3])
    qc.u1(pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.tdg(qr[5])
    qc.cx(qr[4], qr[5])
    qc.tdg(qr[4])
    qc.t(qr[5])
    qc.cx(qr[4], qr[5])
    qc.h(qr[5])
    # inverse controlled-u1(pi/16)
    qc.u1(-pi/16, qr[6]) #sqrt(sqrt(T))
    qc.cx(qr[3], qr[6])
    qc.u1(-pi/16, qr[3])
    qc.u1(pi/16, qr[6])
    qc.cx(qr[3], qr[6])
    # inverse controlled-T
    qc.u1(-pi/8, qr[6]) #u1=sqrt(T)
    qc.cx(qr[4], qr[6])
    qc.u1(-pi/8, qr[4])
    qc.u1(pi/8, qr[6])
    qc.cx(qr[4], qr[6])
    # inverse controlled-S
    qc.tdg(qr[6])
    qc.cx(qr[5], qr[6])
    qc.tdg(qr[5])
    qc.t(qr[6])
    qc.cx(qr[5], qr[6])
    qc.h(qr[6])
    # X-CNOT-X (NOT on qr[6] anti-controlled by it, via qr[7])
    qc.x(qr[6])
    qc.cx(qr[7], qr[6])
    qc.x(qr[6])
    # QFT
    qc.h(qr[6])
    # controlled-S
    qc.t(qr[6])
    qc.cx(qr[5], qr[6])
    qc.t(qr[5])
    qc.tdg(qr[6])
    qc.cx(qr[5], qr[6])
    # controlled-T
    qc.u1(pi/8, qr[6]) #u1=sqrt(T)
    qc.cx(qr[4], qr[6])
    qc.u1(pi/8, qr[4])
    qc.u1(-pi/8, qr[6])
    qc.cx(qr[4], qr[6])
    # controlled-u1(pi/16)
    qc.u1(pi/16, qr[6]) #sqrt(sqrt(T))
    qc.cx(qr[3], qr[6])
    qc.u1(pi/16, qr[3])
    qc.u1(-pi/16, qr[6])
    qc.cx(qr[3], qr[6])
    qc.h(qr[5]) # apply H to qubit 1
    # controlled-S
    qc.u1(pi/4, qr[5])
    qc.cx(qr[4], qr[5])
    qc.u1(pi/4, qr[4])
    qc.u1(-pi/4, qr[5])
    qc.cx(qr[4], qr[5])
    # controlled-T
    qc.u1(pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.u1(pi/8, qr[3])
    qc.u1(-pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.h(qr[4])
    # controlled-S
    qc.u1(pi/4, qr[4])
    qc.cx(qr[3], qr[4])
    qc.u1(pi/4, qr[3])
    qc.u1(-pi/4, qr[4])
    qc.cx(qr[3], qr[4])
    qc.h(qr[3])
    qc.swap(qr[6], qr[3])
    qc.swap(qr[5], qr[4])
    # add a
    qc.u1(5*pi/8, qr[3])
    qc.u1(5*pi/4, qr[4])
    qc.u1(pi/2, qr[5])
    qc.u1(pi, qr[6])
    # inverse QFT
    qc.swap(qr[6], qr[3])
    qc.swap(qr[5], qr[4])
    qc.h(qr[3])
    qc.tdg(qr[4])
    qc.cx(qr[3], qr[4])
    qc.tdg(qr[3])
    qc.t(qr[4])
    qc.cx(qr[3], qr[4])
    qc.h(qr[4])
    qc.u1(-pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.u1(-pi/8, qr[3])
    qc.u1(pi/8, qr[5])
    qc.cx(qr[3], qr[5])
    qc.tdg(qr[5])
    qc.cx(qr[4], qr[5])
    qc.tdg(qr[4])
    qc.t(qr[5])
    qc.cx(qr[4], qr[5])
    qc.h(qr[5])
    # inverse controlled-u1(pi/16)
    qc.u1(-pi/16, qr[6]) #sqrt(sqrt(T))
    qc.cx(qr[3], qr[6])
    qc.u1(-pi/16, qr[3])
    qc.u1(pi/16, qr[6])
    qc.cx(qr[3], qr[6])
    # inverse controlled-T
    qc.u1(-pi/8, qr[6]) #u1=sqrt(T)
    qc.cx(qr[4], qr[6])
    qc.u1(-pi/8, qr[4])
    qc.u1(pi/8, qr[6])
    qc.cx(qr[4], qr[6])
    # inverse controlled-S
    qc.tdg(qr[6])
    qc.cx(qr[5], qr[6])
    qc.tdg(qr[5])
    qc.t(qr[6])
    qc.cx(qr[5], qr[6])
    qc.h(qr[6])
    qc.measure(qr, cr)
    result = Q_program.execute(["superposition"], backend='local_qasm_simulator', shots=g)
    print(result)
    print(result.get_data("superposition"))
    #tmp = result.get_data("superposition")
    #plot_histogram(tmp['counts'])
#####
#END#
| 22.061824
| 108
| 0.435442
| 5,178
| 28,548
| 2.374662
| 0.037273
| 0.078725
| 0.108328
| 0.05636
| 0.924772
| 0.888988
| 0.853042
| 0.8419
| 0.835394
| 0.820755
| 0
| 0.083399
| 0.2927
| 28,548
| 1,294
| 109
| 22.061824
| 0.525555
| 0.166527
| 0
| 0.930759
| 0
| 0
| 0.035108
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011984
| false
| 0
| 0.005326
| 0
| 0.01731
| 0.023968
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a548c20ddd99c626fe755a6baa5e63ff9881548c
| 84
|
py
|
Python
|
tests/others/test_metrics.py
|
lanSeFangZhou/tokenizer_tools
|
edd931ae86a6e381b57e50f8b59ae19d3151d26b
|
[
"MIT"
] | null | null | null |
tests/others/test_metrics.py
|
lanSeFangZhou/tokenizer_tools
|
edd931ae86a6e381b57e50f8b59ae19d3151d26b
|
[
"MIT"
] | null | null | null |
tests/others/test_metrics.py
|
lanSeFangZhou/tokenizer_tools
|
edd931ae86a6e381b57e50f8b59ae19d3151d26b
|
[
"MIT"
] | null | null | null |
from tokenizer_tools.metrics import correct_rate
def test_correct_rate():
    """Smoke test placeholder: importing correct_rate must succeed (no assertions yet)."""
| 16.8
| 48
| 0.809524
| 12
| 84
| 5.333333
| 0.833333
| 0.34375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 84
| 4
| 49
| 21
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
a562f43789b5175ef73a757ebf0ee8b546a3feff
| 16,360
|
py
|
Python
|
app/apis/tests/test_reports_api.py
|
S3Infosoft/mvr-insights
|
ac73feff03c1592d5efd8e0b82f72dd4dbd3e921
|
[
"MIT"
] | null | null | null |
app/apis/tests/test_reports_api.py
|
S3Infosoft/mvr-insights
|
ac73feff03c1592d5efd8e0b82f72dd4dbd3e921
|
[
"MIT"
] | 20
|
2019-06-17T11:01:25.000Z
|
2020-05-09T06:13:17.000Z
|
app/apis/tests/test_reports_api.py
|
S3Infosoft/mvr-insights
|
ac73feff03c1592d5efd8e0b82f72dd4dbd3e921
|
[
"MIT"
] | 1
|
2020-03-03T11:13:57.000Z
|
2020-03-03T11:13:57.000Z
|
from .. import serializers
from enquiry.models import OTA, Partner, Review
from django.urls import reverse
from django.contrib.auth import get_user_model
from django.core.cache import cache
from rest_framework import status
from rest_framework.test import APITestCase
import pytz
from datetime import datetime
from unittest import mock
# Resolved URL of the report API endpoint exercised by every test below.
REPORT_URL = reverse("api_report")
# Fixed reference date (naive) used to build deterministic timestamps;
# create_model_instance attaches UTC when freezing timezone.now.
CURRENT_DATE = datetime(2019, 1, 1)
def create_model_instance(model, params: dict, **kwargs):
    """Create a *model* row while ``django.utils.timezone.now`` is frozen.

    :param model: Django model class whose manager's ``create`` is called.
    :param params: field values passed through to ``model.objects.create``.
    :param kwargs: overrides for the frozen timestamp components
        (``year``/``month``/``day``/...); defaults to 2019-01-01 UTC.
    :return: the created model instance (the original discarded it;
        returning it is backward compatible and lets callers inspect it).
    """
    default_time = {"year": 2019, "month": 1, "day": 1}
    # dict.update with an empty mapping is a no-op, so no guard is needed.
    default_time.update(kwargs)
    mocked_time = datetime(**default_time, tzinfo=pytz.utc)
    # Freeze timezone.now so auto_now_add / default timestamps are
    # deterministic for the duration of the create call.
    with mock.patch("django.utils.timezone.now",
                    mock.Mock(return_value=mocked_time)):
        return model.objects.create(**params)
class TestPublicReportAPI(APITestCase):
    """Unauthenticated requests to the report endpoint must be rejected."""

    def test_login_always_required(self):
        """An anonymous GET with an otherwise valid payload returns 403."""
        window_start = datetime(year=CURRENT_DATE.year,
                                month=CURRENT_DATE.month,
                                day=CURRENT_DATE.day)
        window_end = datetime(year=CURRENT_DATE.year,
                              month=CURRENT_DATE.month + 2,
                              day=CURRENT_DATE.day)
        query = {
            "start_date": window_start.date(),
            "end_date": window_end.date(),
            "enquiry_type": "OTA",
        }
        response = self.client.get(REPORT_URL, data=query)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
class TestPrivateReportAPI(APITestCase):
    """Test authorized API requests.

    The six report tests share one scenario: create 7 model instances
    spread over two months, request a report covering instances 2..6,
    and verify every field of the response.  The shared steps live in
    ``_populate_instances`` / ``_check_report``; each public test keeps
    its original name and only supplies model-specific configuration.
    """

    # Base creation payloads per enquiry model; the per-instance
    # name/headline field is filled in by _populate_instances.
    OTA_PAYLOAD = {
        "contact_person": "Super Saiyan",
        "contact_number": "9988776655",
        "contact_email": "goku@dbz.com",
    }
    PARTNER_PAYLOAD = {
        "partner_type": "TRAVEL_AGENT",
        "contact_person": "Super Saiyan",
        "contact_number": "9988776655",
        "contact_email": "goku@dbz.com",
    }
    REVIEW_PAYLOAD = {
        "source": "Secret",
        "rating": 3.0,
        "description": "Don't need",
        "action": "JCB ki khudaai",
    }

    def setUp(self):
        # Authenticate every request issued by tests in this class.
        self.user = get_user_model().objects.create_user(
            "abhie@infosoft.com", "django123"
        )
        self.client.force_login(self.user)

    def _populate_instances(self, model, base_payload, name_field):
        """Create 7 *model* rows: 4 on consecutive days of the reference
        month plus 3 in the following month, then assert the count."""
        payload = dict(base_payload)  # never mutate the class-level dict
        for i in range(4):
            payload[name_field] = f"I am Number {i}"
            create_model_instance(model, params=payload,
                                  day=CURRENT_DATE.day + i)
        for i in range(1, 4):
            # NOTE(review): the original used month + 1 (not month + i),
            # so all three rows share one frozen timestamp — preserved
            # here for behavioral parity; confirm whether +i was intended.
            payload[name_field] = f"Terminator {i}"
            create_model_instance(model, params=payload,
                                  month=CURRENT_DATE.month + 1)
        self.assertEqual(model.objects.count(), 7)

    def _check_report(self, model, serializer_class, enquiry_type,
                      base_payload, name_field, date_field,
                      use_cache=False):
        """Run the full report scenario for one enquiry type.

        Populates *model*, queries the report endpoint for the window
        spanning instances 2..6 (ordered by *date_field*), and asserts
        the response data, dates, enquiry type, and export URLs.  With
        ``use_cache`` the expected queryset is primed into the cache
        first so the view's cached branch is exercised.
        """
        self._populate_instances(model, base_payload, name_field)
        ordered = model.objects.order_by(date_field)
        start_date = getattr(ordered[2], date_field).date()
        end_date = getattr(ordered[6], date_field).date()
        expected_qs = model.objects.filter(**{
            f"{date_field}__date__gte": start_date,
            f"{date_field}__date__lte": end_date,
        })
        if use_cache:
            # Same cache-key format the view uses: "<TYPE>-<start>-<end>".
            cache.set("{}-{}-{}".format(enquiry_type, start_date, end_date),
                      expected_qs)
        res = self.client.get(REPORT_URL, {"start_date": start_date,
                                           "end_date": end_date,
                                           "enquiry_type": enquiry_type})
        serializer = serializer_class(expected_qs, many=True)
        url_params = [start_date.day, start_date.month, start_date.year,
                      end_date.day, end_date.month, end_date.year,
                      enquiry_type]
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(res.data["data"], serializer.data)
        self.assertEqual(res.data["start_date"], start_date)
        self.assertEqual(res.data["end_date"], end_date)
        self.assertEqual(res.data["enquiry_type"], enquiry_type)
        self.assertEqual(res.data["csv_url"],
                         reverse("activity:csv", args=url_params))
        self.assertEqual(res.data["pdf_url"],
                         reverse("activity:pdf", args=url_params))
        self.assertEqual(res.data["email_url"],
                         reverse("activity:report_email", args=url_params))

    def test_access_to_logged_in_user(self):
        """A logged-in user may query the report endpoint."""
        payload = {"start_date": CURRENT_DATE.date(),
                   "end_date": CURRENT_DATE.date(),
                   "enquiry_type": "OTA"}
        res = self.client.get(REPORT_URL, payload)
        self.assertEqual(res.status_code, status.HTTP_200_OK)

    def test_error_on_incomplete_data(self):
        """A payload missing required fields is rejected with 400.

        NOTE(review): "enquiry_date" looks like a typo for "enquiry_type",
        but the payload is intentionally incomplete (no end_date) either
        way, so the original key is kept.
        """
        payload = {"start_date": CURRENT_DATE.date(),
                   "enquiry_date": "OTA"}
        res = self.client.get(REPORT_URL, payload)
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)

    def test_ota_report_is_generated_from_duration_given(self):
        self._check_report(OTA, serializers.OTASerializer, "OTA",
                           self.OTA_PAYLOAD, "name", "registration")

    def test_partner_report_is_generated_from_duration_given(self):
        self._check_report(Partner, serializers.PartnerSerializer, "PARTNER",
                           self.PARTNER_PAYLOAD, "name", "created")

    def test_review_report_is_generated_from_duration_given(self):
        self._check_report(Review, serializers.ReviewSerializer, "REVIEW",
                           self.REVIEW_PAYLOAD, "headline", "created")

    def test_ota_report_is_generated_from_duration_given_from_cache(self):
        self._check_report(OTA, serializers.OTASerializer, "OTA",
                           self.OTA_PAYLOAD, "name", "registration",
                           use_cache=True)

    def test_partner_report_is_generated_from_duration_given_from_cache(self):
        self._check_report(Partner, serializers.PartnerSerializer, "PARTNER",
                           self.PARTNER_PAYLOAD, "name", "created",
                           use_cache=True)

    def test_review_report_is_generated_from_duration_given_from_cache(self):
        self._check_report(Review, serializers.ReviewSerializer, "REVIEW",
                           self.REVIEW_PAYLOAD, "headline", "created",
                           use_cache=True)
| 40
| 79
| 0.571271
| 1,805
| 16,360
| 4.898615
| 0.090305
| 0.063108
| 0.103823
| 0.104501
| 0.882832
| 0.876385
| 0.876385
| 0.866885
| 0.861796
| 0.855915
| 0
| 0.011589
| 0.319621
| 16,360
| 408
| 80
| 40.098039
| 0.782769
| 0.003667
| 0
| 0.805732
| 0
| 0
| 0.113512
| 0.00927
| 0
| 0
| 0
| 0
| 0.181529
| 1
| 0.035032
| false
| 0
| 0.031847
| 0
| 0.073248
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a5713693a32e8bb2d43a9063cc00f35e74a8c660
| 97,785
|
py
|
Python
|
TweakApi/apis/data_source_soap_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
TweakApi/apis/data_source_soap_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
TweakApi/apis/data_source_soap_api.py
|
tweak-com-public/tweak-api-client-python
|
019f86da11fdb12683d516f8f37db5d717380bcc
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
tweak-api
Tweak API to integrate with all the Tweak services. You can find out more about Tweak at <a href='https://www.tweak.com'>https://www.tweak.com</a>, #tweak.
OpenAPI spec version: 1.0.8-beta.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class DataSourceSoapApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def data_source_soaps_change_stream_get(self, **kwargs):
"""
Create a change stream.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_change_stream_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str options:
:return: file
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_change_stream_get_with_http_info(**kwargs)
else:
(data) = self.data_source_soaps_change_stream_get_with_http_info(**kwargs)
return data
    def data_source_soaps_change_stream_get_with_http_info(self, **kwargs):
        """
        Create a change stream.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_change_stream_get_with_http_info(callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str options:
        :return: file
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # locals() below snapshots the frame, so all_params and the
        # snapshot must be taken before any other local is bound.
        all_params = ['options']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        params = locals()
        # Reject unknown keyword arguments, then flatten kwargs into params.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_change_stream_get" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        # Assemble the request: path, query string, headers, form/body.
        resource_path = '/DataSourceSoaps/change-stream'.replace('{format}', 'json')
        path_params = {}
        query_params = {}
        if 'options' in params:
            query_params['options'] = params['options']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the actual HTTP call to the shared ApiClient.
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='file',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_change_stream_post(self, **kwargs):
"""
Create a change stream.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_change_stream_post(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str options:
:return: file
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_change_stream_post_with_http_info(**kwargs)
else:
(data) = self.data_source_soaps_change_stream_post_with_http_info(**kwargs)
return data
    def data_source_soaps_change_stream_post_with_http_info(self, **kwargs):
        """
        Create a change stream.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_change_stream_post_with_http_info(callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str options:
        :return: file
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # locals() below snapshots the frame, so all_params and the
        # snapshot must be taken before any other local is bound.
        all_params = ['options']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        params = locals()
        # Reject unknown keyword arguments, then flatten kwargs into params.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_change_stream_post" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        # Assemble the request; unlike the GET variant, `options` is sent
        # as a form field rather than a query parameter.
        resource_path = '/DataSourceSoaps/change-stream'.replace('{format}', 'json')
        path_params = {}
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        if 'options' in params:
            form_params.append(('options', params['options']))
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the actual HTTP call to the shared ApiClient.
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='file',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_count_get(self, **kwargs):
"""
Count instances of the model matched by where from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_count_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str where: Criteria to match model instances
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_count_get_with_http_info(**kwargs)
else:
(data) = self.data_source_soaps_count_get_with_http_info(**kwargs)
return data
    def data_source_soaps_count_get_with_http_info(self, **kwargs):
        """
        Count instances of the model matched by where from the data source.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_count_get_with_http_info(callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str where: Criteria to match model instances
        :return: InlineResponse2001
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # locals() below snapshots the frame, so all_params and the
        # snapshot must be taken before any other local is bound.
        all_params = ['where']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        params = locals()
        # Reject unknown keyword arguments, then flatten kwargs into params.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_count_get" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        # Assemble the request: path, query string, headers, form/body.
        resource_path = '/DataSourceSoaps/count'.replace('{format}', 'json')
        path_params = {}
        query_params = {}
        if 'where' in params:
            query_params['where'] = params['where']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the actual HTTP call to the shared ApiClient.
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='InlineResponse2001',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_find_one_get(self, **kwargs):
"""
Find first instance of the model matched by filter from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_find_one_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
:return: DataSourceSoap
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_find_one_get_with_http_info(**kwargs)
else:
(data) = self.data_source_soaps_find_one_get_with_http_info(**kwargs)
return data
    def data_source_soaps_find_one_get_with_http_info(self, **kwargs):
        """
        Find first instance of the model matched by filter from the data source.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_find_one_get_with_http_info(callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
        :return: DataSourceSoap
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # locals() below snapshots the frame, so all_params and the
        # snapshot must be taken before any other local is bound.
        all_params = ['filter']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        params = locals()
        # Reject unknown keyword arguments, then flatten kwargs into params.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_find_one_get" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        # Assemble the request: path, query string, headers, form/body.
        resource_path = '/DataSourceSoaps/findOne'.replace('{format}', 'json')
        path_params = {}
        query_params = {}
        if 'filter' in params:
            query_params['filter'] = params['filter']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the actual HTTP call to the shared ApiClient.
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='DataSourceSoap',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_get(self, **kwargs):
"""
Find all instances of the model matched by filter from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
:return: list[DataSourceSoap]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_get_with_http_info(**kwargs)
else:
(data) = self.data_source_soaps_get_with_http_info(**kwargs)
return data
    def data_source_soaps_get_with_http_info(self, **kwargs):
        """
        Find all instances of the model matched by filter from the data source.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_get_with_http_info(callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
        :return: list[DataSourceSoap]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # locals() below snapshots the frame, so all_params and the
        # snapshot must be taken before any other local is bound.
        all_params = ['filter']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        params = locals()
        # Reject unknown keyword arguments, then flatten kwargs into params.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_get" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        # Assemble the request: path, query string, headers, form/body.
        resource_path = '/DataSourceSoaps'.replace('{format}', 'json')
        path_params = {}
        query_params = {}
        if 'filter' in params:
            query_params['filter'] = params['filter']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the actual HTTP call to the shared ApiClient.
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='list[DataSourceSoap]',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_id_delete(self, id, **kwargs):
"""
Delete a model instance by {{id}} from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_delete(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_delete_with_http_info(id, **kwargs)
else:
(data) = self.data_source_soaps_id_delete_with_http_info(id, **kwargs)
return data
    def data_source_soaps_id_delete_with_http_info(self, id, **kwargs):
        """
        Delete a model instance by {{id}} from the data source.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_id_delete_with_http_info(id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str id: Model id (required)
        :return: object
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # locals() below snapshots the frame (including `id`), so
        # all_params and the snapshot must precede any other local.
        all_params = ['id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        params = locals()
        # Reject unknown keyword arguments, then flatten kwargs into params.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_id_delete" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_delete`")
        collection_formats = {}
        # Assemble the request; `id` is substituted into the URL path.
        resource_path = '/DataSourceSoaps/{id}'.replace('{format}', 'json')
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the actual HTTP call to the shared ApiClient.
        return self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='object',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_id_dynamic_datas_count_get(self, id, **kwargs):
"""
Counts dynamicDatas of DataSourceSoap.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_dynamic_datas_count_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: DataSourceSoap id (required)
:param str where: Criteria to match model instances
:return: InlineResponse2001
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_dynamic_datas_count_get_with_http_info(id, **kwargs)
else:
(data) = self.data_source_soaps_id_dynamic_datas_count_get_with_http_info(id, **kwargs)
return data
    def data_source_soaps_id_dynamic_datas_count_get_with_http_info(self, id, **kwargs):
        """
        Counts dynamicDatas of DataSourceSoap.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_id_dynamic_datas_count_get_with_http_info(id, callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param str id: DataSourceSoap id (required)
        :param str where: Criteria to match model instances
        :return: InlineResponse2001
        If the method is called asynchronously,
        returns the request thread.
        """
        # Keyword arguments accepted by this endpoint; anything else is
        # rejected below with a TypeError.
        all_params = ['id', 'where']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        # NOTE: locals() is captured here on purpose, before any further
        # local is bound, so `params` holds exactly self/id/kwargs/all_params.
        params = locals()
        # Validate **kwargs against the whitelist and promote each entry
        # to a top-level key of `params`.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_id_dynamic_datas_count_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_dynamic_datas_count_get`")
        collection_formats = {}
        resource_path = '/DataSourceSoaps/{id}/dynamicDatas/count'.replace('{format}', 'json')
        # `{id}` in the path is substituted by the API client from path_params.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        # Optional `where` filter is sent as a query-string parameter.
        query_params = {}
        if 'where' in params:
            query_params['where'] = params['where']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the GET request; per the docstring, a `callback`
        # makes this return the request thread instead of the response.
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='InlineResponse2001',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_id_dynamic_datas_delete(self, id, **kwargs):
"""
Deletes all dynamicDatas of this model.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_dynamic_datas_delete(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: DataSourceSoap id (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_dynamic_datas_delete_with_http_info(id, **kwargs)
else:
(data) = self.data_source_soaps_id_dynamic_datas_delete_with_http_info(id, **kwargs)
return data
    def data_source_soaps_id_dynamic_datas_delete_with_http_info(self, id, **kwargs):
        """
        Deletes all dynamicDatas of this model.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_id_dynamic_datas_delete_with_http_info(id, callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param str id: DataSourceSoap id (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Keyword arguments accepted by this endpoint; anything else is
        # rejected below with a TypeError.
        all_params = ['id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        # NOTE: locals() is captured here on purpose, before any further
        # local is bound, so `params` holds exactly self/id/kwargs/all_params.
        params = locals()
        # Validate **kwargs against the whitelist and promote each entry
        # to a top-level key of `params`.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_id_dynamic_datas_delete" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_dynamic_datas_delete`")
        collection_formats = {}
        resource_path = '/DataSourceSoaps/{id}/dynamicDatas'.replace('{format}', 'json')
        # `{id}` in the path is substituted by the API client from path_params.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the DELETE request; per the docstring, a `callback`
        # makes this return the request thread instead of the response.
        return self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_id_dynamic_datas_fk_delete(self, id, fk, **kwargs):
"""
Delete a related item by id for dynamicDatas.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_dynamic_datas_fk_delete(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: DataSourceSoap id (required)
:param str fk: Foreign key for dynamicDatas (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_dynamic_datas_fk_delete_with_http_info(id, fk, **kwargs)
else:
(data) = self.data_source_soaps_id_dynamic_datas_fk_delete_with_http_info(id, fk, **kwargs)
return data
    def data_source_soaps_id_dynamic_datas_fk_delete_with_http_info(self, id, fk, **kwargs):
        """
        Delete a related item by id for dynamicDatas.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_id_dynamic_datas_fk_delete_with_http_info(id, fk, callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param str id: DataSourceSoap id (required)
        :param str fk: Foreign key for dynamicDatas (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Keyword arguments accepted by this endpoint; anything else is
        # rejected below with a TypeError.
        all_params = ['id', 'fk']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        # NOTE: locals() is captured here on purpose, before any further
        # local is bound, so `params` holds exactly self/id/fk/kwargs/all_params.
        params = locals()
        # Validate **kwargs against the whitelist and promote each entry
        # to a top-level key of `params`.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_id_dynamic_datas_fk_delete" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_dynamic_datas_fk_delete`")
        # verify the required parameter 'fk' is set
        if ('fk' not in params) or (params['fk'] is None):
            raise ValueError("Missing the required parameter `fk` when calling `data_source_soaps_id_dynamic_datas_fk_delete`")
        collection_formats = {}
        resource_path = '/DataSourceSoaps/{id}/dynamicDatas/{fk}'.replace('{format}', 'json')
        # `{id}` and `{fk}` in the path are substituted by the API client.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        if 'fk' in params:
            path_params['fk'] = params['fk']
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the DELETE request; per the docstring, a `callback`
        # makes this return the request thread instead of the response.
        return self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_id_dynamic_datas_fk_get(self, id, fk, **kwargs):
"""
Find a related item by id for dynamicDatas.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_dynamic_datas_fk_get(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: DataSourceSoap id (required)
:param str fk: Foreign key for dynamicDatas (required)
:return: DynamicData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_dynamic_datas_fk_get_with_http_info(id, fk, **kwargs)
else:
(data) = self.data_source_soaps_id_dynamic_datas_fk_get_with_http_info(id, fk, **kwargs)
return data
    def data_source_soaps_id_dynamic_datas_fk_get_with_http_info(self, id, fk, **kwargs):
        """
        Find a related item by id for dynamicDatas.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_id_dynamic_datas_fk_get_with_http_info(id, fk, callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param str id: DataSourceSoap id (required)
        :param str fk: Foreign key for dynamicDatas (required)
        :return: DynamicData
        If the method is called asynchronously,
        returns the request thread.
        """
        # Keyword arguments accepted by this endpoint; anything else is
        # rejected below with a TypeError.
        all_params = ['id', 'fk']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        # NOTE: locals() is captured here on purpose, before any further
        # local is bound, so `params` holds exactly self/id/fk/kwargs/all_params.
        params = locals()
        # Validate **kwargs against the whitelist and promote each entry
        # to a top-level key of `params`.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_id_dynamic_datas_fk_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_dynamic_datas_fk_get`")
        # verify the required parameter 'fk' is set
        if ('fk' not in params) or (params['fk'] is None):
            raise ValueError("Missing the required parameter `fk` when calling `data_source_soaps_id_dynamic_datas_fk_get`")
        collection_formats = {}
        resource_path = '/DataSourceSoaps/{id}/dynamicDatas/{fk}'.replace('{format}', 'json')
        # `{id}` and `{fk}` in the path are substituted by the API client.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        if 'fk' in params:
            path_params['fk'] = params['fk']
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the GET request; per the docstring, a `callback`
        # makes this return the request thread instead of the response.
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='DynamicData',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_id_dynamic_datas_fk_put(self, id, fk, **kwargs):
"""
Update a related item by id for dynamicDatas.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_dynamic_datas_fk_put(id, fk, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: DataSourceSoap id (required)
:param str fk: Foreign key for dynamicDatas (required)
:param DynamicData data:
:return: DynamicData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_dynamic_datas_fk_put_with_http_info(id, fk, **kwargs)
else:
(data) = self.data_source_soaps_id_dynamic_datas_fk_put_with_http_info(id, fk, **kwargs)
return data
    def data_source_soaps_id_dynamic_datas_fk_put_with_http_info(self, id, fk, **kwargs):
        """
        Update a related item by id for dynamicDatas.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_id_dynamic_datas_fk_put_with_http_info(id, fk, callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param str id: DataSourceSoap id (required)
        :param str fk: Foreign key for dynamicDatas (required)
        :param DynamicData data:
        :return: DynamicData
        If the method is called asynchronously,
        returns the request thread.
        """
        # Keyword arguments accepted by this endpoint; anything else is
        # rejected below with a TypeError.
        all_params = ['id', 'fk', 'data']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        # NOTE: locals() is captured here on purpose, before any further
        # local is bound, so `params` holds exactly self/id/fk/kwargs/all_params.
        params = locals()
        # Validate **kwargs against the whitelist and promote each entry
        # to a top-level key of `params`.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_id_dynamic_datas_fk_put" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_dynamic_datas_fk_put`")
        # verify the required parameter 'fk' is set
        if ('fk' not in params) or (params['fk'] is None):
            raise ValueError("Missing the required parameter `fk` when calling `data_source_soaps_id_dynamic_datas_fk_put`")
        collection_formats = {}
        resource_path = '/DataSourceSoaps/{id}/dynamicDatas/{fk}'.replace('{format}', 'json')
        # `{id}` and `{fk}` in the path are substituted by the API client.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        if 'fk' in params:
            path_params['fk'] = params['fk']
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Optional request body: the DynamicData payload to apply.
        if 'data' in params:
            body_params = params['data']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the PUT request; per the docstring, a `callback`
        # makes this return the request thread instead of the response.
        return self.api_client.call_api(resource_path, 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='DynamicData',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_id_dynamic_datas_get(self, id, **kwargs):
"""
Queries dynamicDatas of DataSourceSoap.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_dynamic_datas_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: DataSourceSoap id (required)
:param str filter:
:return: list[DynamicData]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_dynamic_datas_get_with_http_info(id, **kwargs)
else:
(data) = self.data_source_soaps_id_dynamic_datas_get_with_http_info(id, **kwargs)
return data
    def data_source_soaps_id_dynamic_datas_get_with_http_info(self, id, **kwargs):
        """
        Queries dynamicDatas of DataSourceSoap.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_id_dynamic_datas_get_with_http_info(id, callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param str id: DataSourceSoap id (required)
        :param str filter:
        :return: list[DynamicData]
        If the method is called asynchronously,
        returns the request thread.
        """
        # Keyword arguments accepted by this endpoint; anything else is
        # rejected below with a TypeError.
        all_params = ['id', 'filter']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        # NOTE: locals() is captured here on purpose, before any further
        # local is bound, so `params` holds exactly self/id/kwargs/all_params.
        params = locals()
        # Validate **kwargs against the whitelist and promote each entry
        # to a top-level key of `params`.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_id_dynamic_datas_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_dynamic_datas_get`")
        collection_formats = {}
        resource_path = '/DataSourceSoaps/{id}/dynamicDatas'.replace('{format}', 'json')
        # `{id}` in the path is substituted by the API client from path_params.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        # Optional `filter` is sent as a query-string parameter.
        query_params = {}
        if 'filter' in params:
            query_params['filter'] = params['filter']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the GET request; per the docstring, a `callback`
        # makes this return the request thread instead of the response.
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='list[DynamicData]',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_id_dynamic_datas_post(self, id, **kwargs):
"""
Creates a new instance in dynamicDatas of this model.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_dynamic_datas_post(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: DataSourceSoap id (required)
:param DynamicData data:
:return: DynamicData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_dynamic_datas_post_with_http_info(id, **kwargs)
else:
(data) = self.data_source_soaps_id_dynamic_datas_post_with_http_info(id, **kwargs)
return data
    def data_source_soaps_id_dynamic_datas_post_with_http_info(self, id, **kwargs):
        """
        Creates a new instance in dynamicDatas of this model.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_id_dynamic_datas_post_with_http_info(id, callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param str id: DataSourceSoap id (required)
        :param DynamicData data:
        :return: DynamicData
        If the method is called asynchronously,
        returns the request thread.
        """
        # Keyword arguments accepted by this endpoint; anything else is
        # rejected below with a TypeError.
        all_params = ['id', 'data']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        # NOTE: locals() is captured here on purpose, before any further
        # local is bound, so `params` holds exactly self/id/kwargs/all_params.
        params = locals()
        # Validate **kwargs against the whitelist and promote each entry
        # to a top-level key of `params`.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_id_dynamic_datas_post" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_dynamic_datas_post`")
        collection_formats = {}
        resource_path = '/DataSourceSoaps/{id}/dynamicDatas'.replace('{format}', 'json')
        # `{id}` in the path is substituted by the API client from path_params.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Optional request body: the DynamicData payload to create.
        if 'data' in params:
            body_params = params['data']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the POST request; per the docstring, a `callback`
        # makes this return the request thread instead of the response.
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='DynamicData',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_id_exists_get(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_exists_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_exists_get_with_http_info(id, **kwargs)
else:
(data) = self.data_source_soaps_id_exists_get_with_http_info(id, **kwargs)
return data
    def data_source_soaps_id_exists_get_with_http_info(self, id, **kwargs):
        """
        Check whether a model instance exists in the data source.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_id_exists_get_with_http_info(id, callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param str id: Model id (required)
        :return: InlineResponse2002
        If the method is called asynchronously,
        returns the request thread.
        """
        # Keyword arguments accepted by this endpoint; anything else is
        # rejected below with a TypeError.
        all_params = ['id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        # NOTE: locals() is captured here on purpose, before any further
        # local is bound, so `params` holds exactly self/id/kwargs/all_params.
        params = locals()
        # Validate **kwargs against the whitelist and promote each entry
        # to a top-level key of `params`.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_id_exists_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_exists_get`")
        collection_formats = {}
        resource_path = '/DataSourceSoaps/{id}/exists'.replace('{format}', 'json')
        # `{id}` in the path is substituted by the API client from path_params.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the GET request; per the docstring, a `callback`
        # makes this return the request thread instead of the response.
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='InlineResponse2002',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_id_get(self, id, **kwargs):
"""
Find a model instance by {{id}} from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
:return: DataSourceSoap
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_get_with_http_info(id, **kwargs)
else:
(data) = self.data_source_soaps_id_get_with_http_info(id, **kwargs)
return data
    def data_source_soaps_id_get_with_http_info(self, id, **kwargs):
        """
        Find a model instance by {{id}} from the data source.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.data_source_soaps_id_get_with_http_info(id, callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param str id: Model id (required)
        :param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
        :return: DataSourceSoap
        If the method is called asynchronously,
        returns the request thread.
        """
        # Keyword arguments accepted by this endpoint; anything else is
        # rejected below with a TypeError.
        all_params = ['id', 'filter']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        # NOTE: locals() is captured here on purpose, before any further
        # local is bound, so `params` holds exactly self/id/kwargs/all_params.
        params = locals()
        # Validate **kwargs against the whitelist and promote each entry
        # to a top-level key of `params`.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method data_source_soaps_id_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_get`")
        collection_formats = {}
        resource_path = '/DataSourceSoaps/{id}'.replace('{format}', 'json')
        # `{id}` in the path is substituted by the API client from path_params.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        # Optional `filter` is sent as a query-string parameter.
        query_params = {}
        if 'filter' in params:
            query_params['filter'] = params['filter']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
        # Authentication setting
        auth_settings = ['access_token']
        # Delegate the GET request; per the docstring, a `callback`
        # makes this return the request thread instead of the response.
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='DataSourceSoap',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        collection_formats=collection_formats)
def data_source_soaps_id_head(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_head(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_head_with_http_info(id, **kwargs)
else:
(data) = self.data_source_soaps_id_head_with_http_info(id, **kwargs)
return data
def data_source_soaps_id_head_with_http_info(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_head_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method data_source_soaps_id_head" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_head`")
collection_formats = {}
resource_path = '/DataSourceSoaps/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'HEAD',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2002',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def data_source_soaps_id_patch(self, id, **kwargs):
"""
Patch attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_patch(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: DataSourceSoap id (required)
:param DataSourceSoap data: An object of model property name/value pairs
:return: DataSourceSoap
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_patch_with_http_info(id, **kwargs)
else:
(data) = self.data_source_soaps_id_patch_with_http_info(id, **kwargs)
return data
def data_source_soaps_id_patch_with_http_info(self, id, **kwargs):
"""
Patch attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_patch_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: DataSourceSoap id (required)
:param DataSourceSoap data: An object of model property name/value pairs
:return: DataSourceSoap
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method data_source_soaps_id_patch" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_patch`")
collection_formats = {}
resource_path = '/DataSourceSoaps/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DataSourceSoap',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def data_source_soaps_id_put(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_put(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param DataSourceSoap data: Model instance data
:return: DataSourceSoap
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_put_with_http_info(id, **kwargs)
else:
(data) = self.data_source_soaps_id_put_with_http_info(id, **kwargs)
return data
def data_source_soaps_id_put_with_http_info(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_put_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param DataSourceSoap data: Model instance data
:return: DataSourceSoap
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method data_source_soaps_id_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_put`")
collection_formats = {}
resource_path = '/DataSourceSoaps/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DataSourceSoap',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def data_source_soaps_id_replace_post(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_replace_post(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param DataSourceSoap data: Model instance data
:return: DataSourceSoap
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_replace_post_with_http_info(id, **kwargs)
else:
(data) = self.data_source_soaps_id_replace_post_with_http_info(id, **kwargs)
return data
def data_source_soaps_id_replace_post_with_http_info(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_replace_post_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param DataSourceSoap data: Model instance data
:return: DataSourceSoap
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method data_source_soaps_id_replace_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_replace_post`")
collection_formats = {}
resource_path = '/DataSourceSoaps/{id}/replace'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DataSourceSoap',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def data_source_soaps_id_team_get(self, id, **kwargs):
"""
Fetches belongsTo relation team.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_team_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: DataSourceSoap id (required)
:param bool refresh:
:return: Team
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_id_team_get_with_http_info(id, **kwargs)
else:
(data) = self.data_source_soaps_id_team_get_with_http_info(id, **kwargs)
return data
def data_source_soaps_id_team_get_with_http_info(self, id, **kwargs):
"""
Fetches belongsTo relation team.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_id_team_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: DataSourceSoap id (required)
:param bool refresh:
:return: Team
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'refresh']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method data_source_soaps_id_team_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `data_source_soaps_id_team_get`")
collection_formats = {}
resource_path = '/DataSourceSoaps/{id}/team'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'refresh' in params:
query_params['refresh'] = params['refresh']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Team',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def data_source_soaps_post(self, **kwargs):
"""
Create a new instance of the model and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_post(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param DataSourceSoap data: Model instance data
:return: DataSourceSoap
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.data_source_soaps_post_with_http_info(**kwargs)
else:
(data) = self.data_source_soaps_post_with_http_info(**kwargs)
return data
def data_source_soaps_post_with_http_info(self, **kwargs):
"""
Create a new instance of the model and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.data_source_soaps_post_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param DataSourceSoap data: Model instance data
:return: DataSourceSoap
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method data_source_soaps_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/DataSourceSoaps'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DataSourceSoap',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
| 41.434322
| 165
| 0.568513
| 10,125
| 97,785
| 5.248691
| 0.026667
| 0.063226
| 0.046572
| 0.039347
| 0.977909
| 0.976535
| 0.97503
| 0.968481
| 0.96453
| 0.962686
| 0
| 0.000925
| 0.347569
| 97,785
| 2,359
| 166
| 41.451886
| 0.832064
| 0.316879
| 0
| 0.837104
| 0
| 0
| 0.182388
| 0.07311
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038914
| false
| 0
| 0.006335
| 0
| 0.103167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c53f6473b60282a8f0af4f6e122325ae83acf4d
| 1,138
|
py
|
Python
|
api/permissions.py
|
megalaren/yamdb_final
|
dec967535d5d188e1dc0171cedd1fa92b330bfee
|
[
"BSD-3-Clause"
] | null | null | null |
api/permissions.py
|
megalaren/yamdb_final
|
dec967535d5d188e1dc0171cedd1fa92b330bfee
|
[
"BSD-3-Clause"
] | null | null | null |
api/permissions.py
|
megalaren/yamdb_final
|
dec967535d5d188e1dc0171cedd1fa92b330bfee
|
[
"BSD-3-Clause"
] | 1
|
2022-02-16T07:46:34.000Z
|
2022-02-16T07:46:34.000Z
|
from rest_framework import permissions
class IsAdmin(permissions.BasePermission):
def has_permission(self, request, view):
return request.user.is_authenticated and request.user.is_admin
def has_object_permission(self, request, view, obj):
return request.user.is_authenticated and request.user.is_admin
class IsAuthor(permissions.BasePermission):
def has_permission(self, request, view):
return request.user.is_authenticated
def has_object_permission(self, request, view, obj):
return obj.author == request.user
class IsModerator(permissions.BasePermission):
def has_permission(self, request, view):
return request.user.is_authenticated and request.user.is_moderator
def has_object_permission(self, request, view, obj):
return request.user.is_authenticated and request.user.is_moderator
class IsReadOnly(permissions.BasePermission):
def has_permission(self, request, view):
return request.method in permissions.SAFE_METHODS
def has_object_permission(self, request, view, obj):
return request.method in permissions.SAFE_METHODS
| 29.947368
| 74
| 0.757469
| 141
| 1,138
| 5.943262
| 0.219858
| 0.131265
| 0.139618
| 0.238663
| 0.868735
| 0.868735
| 0.868735
| 0.797136
| 0.797136
| 0.742243
| 0
| 0
| 0.16696
| 1,138
| 37
| 75
| 30.756757
| 0.883966
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.380952
| false
| 0
| 0.047619
| 0.380952
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
b1ba817c45e242afe152cc1b3e2357ee3e5f7ece
| 10,387
|
py
|
Python
|
gryphon/tests/logic/libraries/order_sliding_test.py
|
qiquanzhijia/gryphon
|
7bb2c646e638212bd1352feb1b5d21536a5b918d
|
[
"Apache-2.0"
] | 1,109
|
2019-06-20T19:23:27.000Z
|
2022-03-20T14:03:43.000Z
|
gryphon/tests/logic/libraries/order_sliding_test.py
|
qiquanzhijia/gryphon
|
7bb2c646e638212bd1352feb1b5d21536a5b918d
|
[
"Apache-2.0"
] | 63
|
2019-06-21T05:36:17.000Z
|
2021-05-26T21:08:15.000Z
|
gryphon/tests/logic/libraries/order_sliding_test.py
|
qiquanzhijia/gryphon
|
7bb2c646e638212bd1352feb1b5d21536a5b918d
|
[
"Apache-2.0"
] | 181
|
2019-06-20T19:42:05.000Z
|
2022-03-21T13:05:13.000Z
|
"""
Tests for gryphon.lib.arbitrage.
TODO:
- Test fee/profit number functionality. This is a bit more than trivial because it's
not guaranteed that our fee levels on a given exchange won't change.
"""
import pyximport; pyximport.install()
import gryphon.lib; gryphon.lib.prepare()
import unittest
import sure
from gryphon.lib import order_sliding
from gryphon.lib.exchange.exchange_order import Order
from gryphon.lib.exchange.consts import Consts
from gryphon.lib.money import Money
from gryphon.lib.exchange.bitstamp_btc_usd import BitstampBTCUSDExchange
class TestOrderSliding(unittest.TestCase):
def setUp(self):
self.bitstamp = BitstampBTCUSDExchange()
bids1 = [
Order(Money('1000', 'USD'), Money('1', 'BTC'), self.bitstamp, Consts.BID),
Order(Money('500', 'USD'), Money('1', 'BTC'), self.bitstamp, Consts.BID),
]
asks1 = [
Order(Money('1001', 'USD'), Money('1', 'BTC'), self.bitstamp, Consts.ASK),
Order(Money('1501', 'USD'), Money('1', 'BTC'), self.bitstamp, Consts.ASK),
]
self.basic_a = {'bids': bids1, 'asks': asks1}
bids2 = [
Order(Money('1000', 'USD'), Money('1', 'BTC'), self.bitstamp, Consts.BID),
Order(Money('900', 'USD'), Money('0.01', 'BTC'), self.bitstamp, Consts.BID),
Order(Money('500', 'USD'), Money('1', 'BTC'), self.bitstamp, Consts.BID),
]
asks2 = [
Order(Money('1001', 'USD'), Money('1', 'BTC'), self.bitstamp, Consts.ASK),
Order(Money('1100', 'USD'), Money('0.01', 'BTC'), self.bitstamp, Consts.BID),
Order(Money('1501', 'USD'), Money('1', 'BTC'), self.bitstamp, Consts.ASK),
]
self.basic_b = {'bids': bids2, 'asks': asks2}
def tearDown(self):
pass
def test_trivial_bid(self):
mode = Consts.BID
initial_price = Money('999', 'USD')
orderbook = self.basic_a
ignore_volume = Money('0.001', 'BTC')
jump = Money('0.01', 'USD')
max_slide = None
new_price = order_sliding.slide_order(
mode,
initial_price,
orderbook,
ignore_volume,
jump,
max_slide,
)
new_price.should.equal(Money('500.01', 'USD'))
def test_trivial_ask(self):
mode = Consts.ASK
initial_price = Money('1002', 'USD')
orderbook = self.basic_a
ignore_volume = Money('0.001', 'BTC')
jump = Money('0.01', 'USD')
max_slide = None
new_price = order_sliding.slide_order(
mode,
initial_price,
orderbook,
ignore_volume,
jump,
max_slide,
)
new_price.should.equal(Money('1500.99', 'USD'))
def test_max_slide_bid(self):
mode = Consts.BID
initial_price = Money('999', 'USD')
orderbook = self.basic_a
ignore_volume = Money('0.001', 'BTC')
jump = Money('0.01', 'USD')
max_slide = Money('100', 'USD')
new_price = order_sliding.slide_order(
mode,
initial_price,
orderbook,
ignore_volume,
jump,
max_slide,
)
new_price.should.equal(Money('899', 'USD'))
def test_max_slide_ask(self):
mode = Consts.ASK
initial_price = Money('1002', 'USD')
orderbook = self.basic_a
ignore_volume = Money('0.001', 'BTC')
jump = Money('0.01', 'USD')
max_slide = Money('100', 'USD')
new_price = order_sliding.slide_order(
mode,
initial_price,
orderbook,
ignore_volume,
jump,
max_slide,
)
new_price.should.equal(Money('1102', 'USD'))
def test_jump_bid(self):
mode = Consts.BID
initial_price = Money('999', 'USD')
orderbook = self.basic_a
ignore_volume = Money('0.001', 'BTC')
jump = Money('100', 'USD')
max_slide = None
new_price = order_sliding.slide_order(
mode,
initial_price,
orderbook,
ignore_volume,
jump,
max_slide,
)
new_price.should.equal(Money('600', 'USD'))
def test_jump_ask(self):
mode = Consts.ASK
initial_price = Money('1002', 'USD')
orderbook = self.basic_a
ignore_volume = Money('0.001', 'BTC')
jump = Money('100', 'USD')
max_slide = None
new_price = order_sliding.slide_order(
mode,
initial_price,
orderbook,
ignore_volume,
jump,
max_slide,
)
new_price.should.equal(Money('1401', 'USD'))
def test_ignore_bid(self):
mode = Consts.BID
initial_price = Money('999', 'USD')
orderbook = self.basic_b
ignore_volume = Money('0.1', 'BTC')
jump = Money('0.01', 'USD')
max_slide = None
new_price = order_sliding.slide_order(
mode,
initial_price,
orderbook,
ignore_volume,
jump,
max_slide,
)
new_price.should.equal(Money('500.01', 'USD'))
def test_ignore_ask(self):
mode = Consts.ASK
initial_price = Money('1002', 'USD')
orderbook = self.basic_b
ignore_volume = Money('0.1', 'BTC')
jump = Money('0.01', 'USD')
max_slide = None
new_price = order_sliding.slide_order(
mode,
initial_price,
orderbook,
ignore_volume,
jump,
max_slide,
)
new_price.should.equal(Money('1500.99', 'USD'))
def test_do_not_ignore_bid(self):
mode = Consts.BID
initial_price = Money('999', 'USD')
orderbook = self.basic_b
ignore_volume = Money('0.001', 'BTC')
jump = Money('0.01', 'USD')
max_slide = None
new_price = order_sliding.slide_order(
mode,
initial_price,
orderbook,
ignore_volume,
jump,
max_slide,
)
new_price.should.equal(Money('900.01', 'USD'))
def test_do_not_ignore_ask(self):
mode = Consts.ASK
initial_price = Money('1002', 'USD')
orderbook = self.basic_b
ignore_volume = Money('0.001', 'BTC')
jump = Money('0.01', 'USD')
max_slide = None
new_price = order_sliding.slide_order(
mode,
initial_price,
orderbook,
ignore_volume,
jump,
max_slide,
)
new_price.should.equal(Money('1099.99', 'USD'))
class TestOrderPriceLogic(unittest.TestCase):
def setUp(self):
#o = Order(Money('600', 'USD'), Money('1', 'BTC'), self.bitstamp, Consts.BID),
pass
def tearDown(self):
pass
def test_widen_bid(self):
price = Money('1', 'USD')
change = Money('0.1', 'USD')
mode = Consts.BID
result = order_sliding.widen_price(mode, price, change)
result.should.equal(Money('0.9', 'USD'))
def test_widen_ask(self):
price = Money('1', 'USD')
change = Money('0.1', 'USD')
mode = Consts.ASK
result = order_sliding.widen_price(mode, price, change)
result.should.equal(Money('1.1', 'USD'))
def test_narrow_bid(self):
price = Money('1', 'USD')
change = Money('0.1', 'USD')
mode = Consts.BID
result = order_sliding.narrow_price(mode, price, change)
result.should.equal(Money('1.1', 'USD'))
def test_narrow_ask(self):
price = Money('1', 'USD')
change = Money('0.1', 'USD')
mode = Consts.ASK
result = order_sliding.narrow_price(mode, price, change)
result.should.equal(Money('0.9', 'USD'))
def test_widen_bid_b(self):
price = Money('1e6', 'BCH')
change = Money('100', 'BCH')
mode = Consts.BID
result = order_sliding.widen_price(mode, price, change)
result.should.equal(Money('999900', 'BCH'))
def test_widen_ask_b(self):
price = Money('1e6', 'BCH')
change = Money('100', 'BCH')
mode = Consts.ASK
result = order_sliding.widen_price(mode, price, change)
result.should.equal(Money('1000100', 'BCH'))
def test_narrow_bid_b(self):
price = Money('1e6', 'BCH')
change = Money('100', 'BCH')
mode = Consts.BID
result = order_sliding.narrow_price(mode, price, change)
result.should.equal(Money('1000100', 'BCH'))
def test_narrow_ask(self):
price = Money('1e6', 'BCH')
change = Money('100', 'BCH')
mode = Consts.ASK
result = order_sliding.narrow_price(mode, price, change)
result.should.equal(Money('999900', 'BCH'))
def test_deeper_price_bid(self):
price_a = Money('1', 'USD')
price_b = Money('0.9', 'USD')
mode = Consts.BID
result = order_sliding.is_deeper_price(mode, price_a, price_b)
result.should.equal(False)
result = order_sliding.is_deeper_price(mode, price_b, price_a)
result.should.equal(True)
def test_deeper_price_ask(self):
price_a = Money('1', 'USD')
price_b = Money('0.9', 'USD')
mode = Consts.ASK
result = order_sliding.is_deeper_price(mode, price_a, price_b)
result.should.equal(True)
result = order_sliding.is_deeper_price(mode, price_b, price_a)
result.should.equal(False)
def test_deeper_price_bid_b(self):
price_a = Money('1000', 'ETH')
price_b = Money('200', 'ETH')
mode = Consts.BID
result = order_sliding.is_deeper_price(mode, price_a, price_b)
result.should.equal(False)
result = order_sliding.is_deeper_price(mode, price_b, price_a)
result.should.equal(True)
def test_deeper_price_ask_b(self):
price_a = Money('1000', 'ETH')
price_b = Money('200', 'ETH')
mode = Consts.ASK
result = order_sliding.is_deeper_price(mode, price_a, price_b)
result.should.equal(True)
result = order_sliding.is_deeper_price(mode, price_b, price_a)
result.should.equal(False)
| 27.478836
| 89
| 0.557235
| 1,247
| 10,387
| 4.44988
| 0.091419
| 0.030276
| 0.051901
| 0.041629
| 0.872409
| 0.851325
| 0.838349
| 0.837809
| 0.831862
| 0.831862
| 0
| 0.042224
| 0.309136
| 10,387
| 377
| 90
| 27.551724
| 0.731048
| 0.026283
| 0
| 0.784946
| 0
| 0
| 0.062246
| 0
| 0
| 0
| 0
| 0.002653
| 0
| 1
| 0.09319
| false
| 0.010753
| 0.032258
| 0
| 0.132616
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b1c106866f09a1faf78ff5ac7909cee6c30aa75d
| 195
|
py
|
Python
|
platform/micro-services/search/source/search/api/search_for_data_book.py
|
keithachorn-intel/peoples-speech
|
b7623488dff36d343f8f5a6ead0a5a3a82f723bd
|
[
"Apache-2.0"
] | 62
|
2021-03-07T06:15:48.000Z
|
2022-03-24T18:58:57.000Z
|
platform/micro-services/search/source/search/api/search_for_data_book.py
|
keithachorn-intel/peoples-speech
|
b7623488dff36d343f8f5a6ead0a5a3a82f723bd
|
[
"Apache-2.0"
] | 59
|
2021-02-26T21:37:03.000Z
|
2022-03-24T16:57:12.000Z
|
platform/micro-services/search/source/search/api/search_for_data_book.py
|
keithachorn-intel/peoples-speech
|
b7623488dff36d343f8f5a6ead0a5a3a82f723bd
|
[
"Apache-2.0"
] | 9
|
2021-02-26T21:34:11.000Z
|
2022-02-09T04:00:50.000Z
|
from search.engine.search_engine_factory import SearchEngineFactory
def search_for_data_book(config, data_book):
return SearchEngineFactory(config).create().search_for_data_book(data_book)
| 32.5
| 79
| 0.851282
| 26
| 195
| 6
| 0.5
| 0.205128
| 0.166667
| 0.217949
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 195
| 5
| 80
| 39
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
592816c287a84cd069e5c60227cbfea779276e96
| 88
|
py
|
Python
|
engine/__init__.py
|
LBartolini/MazeSolver
|
c5358a072172b2ceeb1eab1e6896aed653c10314
|
[
"MIT"
] | null | null | null |
engine/__init__.py
|
LBartolini/MazeSolver
|
c5358a072172b2ceeb1eab1e6896aed653c10314
|
[
"MIT"
] | null | null | null |
engine/__init__.py
|
LBartolini/MazeSolver
|
c5358a072172b2ceeb1eab1e6896aed653c10314
|
[
"MIT"
] | null | null | null |
from engine.window import *
from engine.widgets import *
from engine.mainloop import *
| 17.6
| 29
| 0.784091
| 12
| 88
| 5.75
| 0.5
| 0.434783
| 0.463768
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147727
| 88
| 4
| 30
| 22
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3ca8f86dc9f6205b2d9bbed753146b5a34bd6df9
| 84
|
py
|
Python
|
up/tasks/det_3d/__init__.py
|
ModelTC/EOD
|
164bff80486e9ae6a095a97667b365c46ceabd86
|
[
"Apache-2.0"
] | 196
|
2021-10-30T05:15:36.000Z
|
2022-03-30T18:43:40.000Z
|
up/tasks/det_3d/__init__.py
|
ModelTC/EOD
|
164bff80486e9ae6a095a97667b365c46ceabd86
|
[
"Apache-2.0"
] | 12
|
2021-10-30T11:33:28.000Z
|
2022-03-31T14:22:58.000Z
|
up/tasks/det_3d/__init__.py
|
ModelTC/EOD
|
164bff80486e9ae6a095a97667b365c46ceabd86
|
[
"Apache-2.0"
] | 23
|
2021-11-01T07:26:17.000Z
|
2022-03-27T05:55:37.000Z
|
from .data import * # noqa
from .models import * # noqa
from .runner import * # noqa
| 28
| 28
| 0.690476
| 12
| 84
| 4.833333
| 0.5
| 0.517241
| 0.482759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.202381
| 84
| 3
| 29
| 28
| 0.865672
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3cda6029f997302d5a0c88591f65d255f060c0d6
| 370
|
py
|
Python
|
stardist_napari/_sample_data.py
|
tlambert03/stardist-napari
|
a8c8aa86c7472a2a943eec923c5644ac634922be
|
[
"BSD-3-Clause"
] | null | null | null |
stardist_napari/_sample_data.py
|
tlambert03/stardist-napari
|
a8c8aa86c7472a2a943eec923c5644ac634922be
|
[
"BSD-3-Clause"
] | null | null | null |
stardist_napari/_sample_data.py
|
tlambert03/stardist-napari
|
a8c8aa86c7472a2a943eec923c5644ac634922be
|
[
"BSD-3-Clause"
] | null | null | null |
def _test_image_nuclei_2d():
from stardist import data
return [(data.test_image_nuclei_2d(), {"name": "nuclei_2d"})]
def _test_image_he_2d():
from stardist import data
return [(data.test_image_he_2d(), {"name": "he_2d"})]
def _test_image_nuclei_3d():
from stardist import data
return [(data.test_image_nuclei_3d(), {"name": "nuclei_3d"})]
| 21.764706
| 65
| 0.689189
| 54
| 370
| 4.277778
| 0.240741
| 0.233766
| 0.25974
| 0.285714
| 0.601732
| 0.601732
| 0.601732
| 0.601732
| 0.601732
| 0
| 0
| 0.029126
| 0.164865
| 370
| 16
| 66
| 23.125
| 0.718447
| 0
| 0
| 0.333333
| 0
| 0
| 0.094595
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a70aa35c0e4b1bc70c0eec9cb03f3fa10e285a77
| 1,768
|
py
|
Python
|
src/addons/optimizers.py
|
schalappe/kenyan_sign_language_classification
|
a578e55c96e8eced1d23d31bb2019f8be308c899
|
[
"MIT"
] | null | null | null |
src/addons/optimizers.py
|
schalappe/kenyan_sign_language_classification
|
a578e55c96e8eced1d23d31bb2019f8be308c899
|
[
"MIT"
] | null | null | null |
src/addons/optimizers.py
|
schalappe/kenyan_sign_language_classification
|
a578e55c96e8eced1d23d31bb2019f8be308c899
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Custom optimizers
"""
import tensorflow as tf
class GCRMSprop(tf.keras.optimizers.RMSprop):
def get_gradients(self, loss, params):
# We here just provide a modified get_gradients() function since we are
# trying to just compute the centralized gradients.
grads = []
gradients = super().get_gradients()
for grad in gradients:
grad_len = len(grad.shape)
if grad_len > 1:
axis = list(range(grad_len - 1))
grad -= tf.reduce_mean(grad, axis=axis, keep_dims=True)
grads.append(grad)
return grads
class GCAdam(tf.keras.optimizers.Adam):
def get_gradients(self, loss, params):
# We here just provide a modified get_gradients() function since we are
# trying to just compute the centralized gradients.
grads = []
gradients = super().get_gradients()
for grad in gradients:
grad_len = len(grad.shape)
if grad_len > 1:
axis = list(range(grad_len - 1))
grad -= tf.reduce_mean(grad, axis=axis, keep_dims=True)
grads.append(grad)
return grads
class GCSGD(tf.keras.optimizers.SGD):
def get_gradients(self, loss, params):
# We here just provide a modified get_gradients() function since we are
# trying to just compute the centralized gradients.
grads = []
gradients = super().get_gradients()
for grad in gradients:
grad_len = len(grad.shape)
if grad_len > 1:
axis = list(range(grad_len - 1))
grad -= tf.reduce_mean(grad, axis=axis, keep_dims=True)
grads.append(grad)
return grads
| 31.017544
| 79
| 0.591629
| 219
| 1,768
| 4.666667
| 0.255708
| 0.105675
| 0.046967
| 0.055773
| 0.866928
| 0.866928
| 0.866928
| 0.866928
| 0.866928
| 0.866928
| 0
| 0.00578
| 0.315045
| 1,768
| 56
| 80
| 31.571429
| 0.83815
| 0.226244
| 0
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088235
| false
| 0
| 0.029412
| 0
| 0.294118
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5986ab42d0fea578ff26dd12eede16c65dc6e620
| 192
|
py
|
Python
|
readthedocs/code-tabs/python/tests/test_slurm_queues_getter_with_props.py
|
xenon-middleware/xenon-tutorial
|
92e4e4037ab2bc67c8473ac4366ff41326a7a41c
|
[
"Apache-2.0"
] | 2
|
2016-06-23T09:03:34.000Z
|
2018-03-31T12:45:39.000Z
|
readthedocs/code-tabs/python/tests/test_slurm_queues_getter_with_props.py
|
NLeSC/Xenon-examples
|
92e4e4037ab2bc67c8473ac4366ff41326a7a41c
|
[
"Apache-2.0"
] | 54
|
2015-11-26T16:36:48.000Z
|
2017-08-01T12:12:51.000Z
|
readthedocs/code-tabs/python/tests/test_slurm_queues_getter_with_props.py
|
xenon-middleware/xenon-examples
|
92e4e4037ab2bc67c8473ac4366ff41326a7a41c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import pytest
from pyxenon_snippets import slurm_queues_getter_with_props
def test_slurm_queues_getter_with_props():
slurm_queues_getter_with_props.run_example()
| 19.2
| 59
| 0.84375
| 29
| 192
| 5.068966
| 0.62069
| 0.22449
| 0.346939
| 0.428571
| 0.530612
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098958
| 192
| 9
| 60
| 21.333333
| 0.849711
| 0.104167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
59e9151d435a507b9ea81320e7faa31679522772
| 5,907
|
py
|
Python
|
tests/test_process_views.py
|
voxolab/voxo-dashboard
|
0593248328f0f47a4c1f00d1a10080ecc559a389
|
[
"MIT"
] | null | null | null |
tests/test_process_views.py
|
voxolab/voxo-dashboard
|
0593248328f0f47a4c1f00d1a10080ecc559a389
|
[
"MIT"
] | null | null | null |
tests/test_process_views.py
|
voxolab/voxo-dashboard
|
0593248328f0f47a4c1f00d1a10080ecc559a389
|
[
"MIT"
] | null | null | null |
import json
from flask import url_for
from voxolab.models import ProcessType
def test_add_process_with_model_should_be_ok(
app, users, media_file, client, asr_models):
token = users['user'].get_auth_token()
res = client.post(url_for('api.add_process', api_version='v1.1'),
content_type='application/json',
data=json.dumps(
{
"id": media_file.id,
"asr_model_name": asr_models["asr_model_french"].name
}),
headers=[('Authentication-Token', token)])
assert res.status_code == 200
result = json.loads(res.data.decode("utf-8"))
assert result['file_id'] == media_file.id
assert result['transcription_id'] == None
assert result['asr_model_name'] == asr_models["asr_model_french"].name
assert result['type'] == \
ProcessType.to_dict()[ProcessType.CustomModelTranscription]
def test_add_process_with_english_model_should_be_ok(
app, users, media_file, client, asr_models):
token = users['user'].get_auth_token()
res = client.post(url_for('api.add_process', api_version='v1.1'),
content_type='application/json',
data=json.dumps(
{
"id": media_file.id,
"asr_model_name": asr_models["asr_model_english"].name
}),
headers=[('Authentication-Token', token)])
assert res.status_code == 200
result = json.loads(res.data.decode("utf-8"))
assert result['file_id'] == media_file.id
assert result['transcription_id'] == None
assert result['asr_model_name'] == asr_models["asr_model_english"].name
assert result['type'] == \
ProcessType.to_dict()[ProcessType.CustomModelTranscription]
def test_add_process_should_default_to_custom_french(
app, users, media_file, client, asr_models):
token = users['user'].get_auth_token()
res = client.post(url_for('api.add_process', api_version='v1.1'),
content_type='application/json',
data=json.dumps({"id": media_file.id}),
headers=[('Authentication-Token', token)])
assert res.status_code == 200
result = json.loads(res.data.decode("utf-8"))
assert result['file_id'] == media_file.id
assert result['transcription_id'] == None
assert result['asr_model_name'] == asr_models["asr_model_french"].name
assert result['type'] == \
ProcessType.to_dict()[ProcessType.CustomModelTranscription]
def test_add_process_should_default_to_custom_english(
app, users, media_file, client, asr_models):
token = users['user'].get_auth_token()
res = client.post(url_for('api.add_process', api_version='v1.1'),
content_type='application/json',
data=json.dumps({"id": media_file.id, 'english': True}),
headers=[('Authentication-Token', token)])
assert res.status_code == 200
result = json.loads(res.data.decode("utf-8"))
assert result['file_id'] == media_file.id
assert result['transcription_id'] == None
assert result['asr_model_name'] == asr_models["asr_model_english"].name
assert result['type'] == \
ProcessType.to_dict()[ProcessType.CustomModelTranscription]
def test_add_phone_process_should_default_to_custom_french(
app, users, media_file, client, asr_models):
token = users['user'].get_auth_token()
res = client.post(url_for('api.add_process', api_version='v1.1'),
content_type='application/json',
data=json.dumps({"id": media_file.id, "phone": True}),
headers=[('Authentication-Token', token)])
assert res.status_code == 200
result = json.loads(res.data.decode("utf-8"))
assert result['file_id'] == media_file.id
assert result['transcription_id'] == None
assert result['asr_model_name'] == asr_models["asr_model_french"].name
assert result['type'] == \
ProcessType.to_dict()[ProcessType.CustomModelTranscription]
def test_add_align_process(app, users, transcription, client):
token = users['user'].get_auth_token()
res = client.post(url_for('api.add_process', api_version='v1.1'),
content_type='application/json',
data=json.dumps(
{
"id": transcription.id,
"type": ProcessType.TranscriptionAlignment
}),
headers=[('Authentication-Token', token)])
assert res.status_code == 200
result = json.loads(res.data.decode("utf-8"))
assert result['transcription_id'] == transcription.id
assert result['file_id'] == None
assert result['type'] == \
ProcessType.to_dict()[ProcessType.TranscriptionAlignment]
def test_get_process(app, users, process, media_file, client):
token = users['user'].get_auth_token()
res = client.get(
url_for('api.get_process',
api_version='v1.1',
process_id=process.id),
content_type='application/json',
headers=[('Authentication-Token', token)])
assert res.status_code == 200
result = json.loads(res.data.decode("utf-8"))
assert result['file_id'] == media_file.id
assert result['id'] == process.id
def test_delete_process(app, users, client, process):
token = users['user'].get_auth_token()
res = client.delete(
url_for('api.delete_process',
api_version='v1.1',
process_id=process.id),
content_type='application/json',
headers=[('Authentication-Token', token)])
assert res.status_code == 200
def test_delete_finished_process(app, users, client, process, finished_process):
token = users['user'].get_auth_token()
res = client.delete(
url_for('api.delete_process',
api_version='v1.1',
process_id=finished_process.id),
content_type='application/json',
headers=[('Authentication-Token', token)])
assert res.status_code == 404
| 32.635359
| 80
| 0.652616
| 732
| 5,907
| 5.004098
| 0.092896
| 0.0819
| 0.033033
| 0.039039
| 0.897625
| 0.877423
| 0.877423
| 0.865411
| 0.865411
| 0.849031
| 0
| 0.011099
| 0.206873
| 5,907
| 180
| 81
| 32.816667
| 0.770758
| 0
| 0
| 0.76378
| 0
| 0
| 0.16661
| 0
| 0
| 0
| 0
| 0
| 0.267717
| 1
| 0.070866
| false
| 0
| 0.023622
| 0
| 0.094488
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
abd8c50290a361218f09ea25069aaedc75f1c524
| 23,996
|
py
|
Python
|
sdk/python/pulumi_akamai/network_list.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2021-01-21T15:22:12.000Z
|
2021-08-25T14:15:29.000Z
|
sdk/python/pulumi_akamai/network_list.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | 59
|
2020-08-13T14:39:36.000Z
|
2022-03-31T15:19:48.000Z
|
sdk/python/pulumi_akamai/network_list.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['NetworkListArgs', 'NetworkList']
@pulumi.input_type
class NetworkListArgs:
def __init__(__self__, *,
description: pulumi.Input[str],
mode: pulumi.Input[str],
type: pulumi.Input[str],
contract_id: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[int]] = None,
lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a NetworkList resource.
:param pulumi.Input[str] description: The description to be assigned to the network list.
:param pulumi.Input[str] mode: A string specifying the interpretation of the `list` parameter. Must be one of the following:
:param pulumi.Input[str] type: The type of the network list; must be either "IP" or "GEO".
:param pulumi.Input[str] contract_id: The contract ID of the network list. If supplied, group_id must also be supplied. The
contract_id value of an existing network list may not be modified.
:param pulumi.Input[int] group_id: The group ID of the network list. If supplied, contract_id must also be supplied. The
group_id value of an existing network list may not be modified.
:param pulumi.Input[Sequence[pulumi.Input[str]]] lists: : (Optional) A list of IP addresses or locations to be included in the list, added to an existing list, or
removed from an existing list.
:param pulumi.Input[str] name: The name to be assigned to the network list.
"""
pulumi.set(__self__, "description", description)
pulumi.set(__self__, "mode", mode)
pulumi.set(__self__, "type", type)
if contract_id is not None:
pulumi.set(__self__, "contract_id", contract_id)
if group_id is not None:
pulumi.set(__self__, "group_id", group_id)
if lists is not None:
pulumi.set(__self__, "lists", lists)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def description(self) -> pulumi.Input[str]:
"""
The description to be assigned to the network list.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: pulumi.Input[str]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def mode(self) -> pulumi.Input[str]:
"""
A string specifying the interpretation of the `list` parameter. Must be one of the following:
"""
return pulumi.get(self, "mode")
@mode.setter
def mode(self, value: pulumi.Input[str]):
pulumi.set(self, "mode", value)
@property
@pulumi.getter
def type(self) -> pulumi.Input[str]:
"""
The type of the network list; must be either "IP" or "GEO".
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: pulumi.Input[str]):
pulumi.set(self, "type", value)
@property
@pulumi.getter(name="contractId")
def contract_id(self) -> Optional[pulumi.Input[str]]:
"""
The contract ID of the network list. If supplied, group_id must also be supplied. The
contract_id value of an existing network list may not be modified.
"""
return pulumi.get(self, "contract_id")
@contract_id.setter
def contract_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "contract_id", value)
@property
@pulumi.getter(name="groupId")
def group_id(self) -> Optional[pulumi.Input[int]]:
"""
The group ID of the network list. If supplied, contract_id must also be supplied. The
group_id value of an existing network list may not be modified.
"""
return pulumi.get(self, "group_id")
@group_id.setter
def group_id(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "group_id", value)
@property
@pulumi.getter
def lists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
: (Optional) A list of IP addresses or locations to be included in the list, added to an existing list, or
removed from an existing list.
"""
return pulumi.get(self, "lists")
@lists.setter
def lists(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "lists", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name to be assigned to the network list.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class _NetworkListState:
def __init__(__self__, *,
contract_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[int]] = None,
lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
mode: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_list_id: Optional[pulumi.Input[str]] = None,
sync_point: Optional[pulumi.Input[int]] = None,
type: Optional[pulumi.Input[str]] = None,
uniqueid: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering NetworkList resources.
:param pulumi.Input[str] contract_id: The contract ID of the network list. If supplied, group_id must also be supplied. The
contract_id value of an existing network list may not be modified.
:param pulumi.Input[str] description: The description to be assigned to the network list.
:param pulumi.Input[int] group_id: The group ID of the network list. If supplied, contract_id must also be supplied. The
group_id value of an existing network list may not be modified.
:param pulumi.Input[Sequence[pulumi.Input[str]]] lists: : (Optional) A list of IP addresses or locations to be included in the list, added to an existing list, or
removed from an existing list.
:param pulumi.Input[str] mode: A string specifying the interpretation of the `list` parameter. Must be one of the following:
:param pulumi.Input[str] name: The name to be assigned to the network list.
:param pulumi.Input[str] network_list_id: The ID of the network list.
:param pulumi.Input[int] sync_point: An integer that identifies the current version of the network list; this value is incremented each time
the list is modified.
:param pulumi.Input[str] type: The type of the network list; must be either "IP" or "GEO".
:param pulumi.Input[str] uniqueid: uniqueId
"""
if contract_id is not None:
pulumi.set(__self__, "contract_id", contract_id)
if description is not None:
pulumi.set(__self__, "description", description)
if group_id is not None:
pulumi.set(__self__, "group_id", group_id)
if lists is not None:
pulumi.set(__self__, "lists", lists)
if mode is not None:
pulumi.set(__self__, "mode", mode)
if name is not None:
pulumi.set(__self__, "name", name)
if network_list_id is not None:
pulumi.set(__self__, "network_list_id", network_list_id)
if sync_point is not None:
pulumi.set(__self__, "sync_point", sync_point)
if type is not None:
pulumi.set(__self__, "type", type)
if uniqueid is not None:
pulumi.set(__self__, "uniqueid", uniqueid)
@property
@pulumi.getter(name="contractId")
def contract_id(self) -> Optional[pulumi.Input[str]]:
"""
The contract ID of the network list. If supplied, group_id must also be supplied. The
contract_id value of an existing network list may not be modified.
"""
return pulumi.get(self, "contract_id")
@contract_id.setter
def contract_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "contract_id", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The description to be assigned to the network list.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="groupId")
def group_id(self) -> Optional[pulumi.Input[int]]:
"""
The group ID of the network list. If supplied, contract_id must also be supplied. The
group_id value of an existing network list may not be modified.
"""
return pulumi.get(self, "group_id")
@group_id.setter
def group_id(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "group_id", value)
@property
@pulumi.getter
def lists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
: (Optional) A list of IP addresses or locations to be included in the list, added to an existing list, or
removed from an existing list.
"""
return pulumi.get(self, "lists")
@lists.setter
def lists(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "lists", value)
@property
@pulumi.getter
def mode(self) -> Optional[pulumi.Input[str]]:
"""
A string specifying the interpretation of the `list` parameter. Must be one of the following:
"""
return pulumi.get(self, "mode")
@mode.setter
def mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "mode", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name to be assigned to the network list.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="networkListId")
def network_list_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the network list.
"""
return pulumi.get(self, "network_list_id")
@network_list_id.setter
def network_list_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "network_list_id", value)
@property
@pulumi.getter(name="syncPoint")
def sync_point(self) -> Optional[pulumi.Input[int]]:
"""
An integer that identifies the current version of the network list; this value is incremented each time
the list is modified.
"""
return pulumi.get(self, "sync_point")
@sync_point.setter
def sync_point(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "sync_point", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
"""
The type of the network list; must be either "IP" or "GEO".
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@property
@pulumi.getter
def uniqueid(self) -> Optional[pulumi.Input[str]]:
"""
uniqueId
"""
return pulumi.get(self, "uniqueid")
@uniqueid.setter
def uniqueid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "uniqueid", value)
class NetworkList(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
contract_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[int]] = None,
lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
mode: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Use the `NetworkList` resource to create a network list, or to modify an existing list.
## Example Usage
Basic usage:
```python
import pulumi
import pulumi_akamai as akamai
network_list = akamai.NetworkList("networkList",
type="IP",
description="network list description",
lists=var["list"],
mode="APPEND",
contract_id="ABC-123",
group_id=12345)
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] contract_id: The contract ID of the network list. If supplied, group_id must also be supplied. The
contract_id value of an existing network list may not be modified.
:param pulumi.Input[str] description: The description to be assigned to the network list.
:param pulumi.Input[int] group_id: The group ID of the network list. If supplied, contract_id must also be supplied. The
group_id value of an existing network list may not be modified.
:param pulumi.Input[Sequence[pulumi.Input[str]]] lists: : (Optional) A list of IP addresses or locations to be included in the list, added to an existing list, or
removed from an existing list.
:param pulumi.Input[str] mode: A string specifying the interpretation of the `list` parameter. Must be one of the following:
:param pulumi.Input[str] name: The name to be assigned to the network list.
:param pulumi.Input[str] type: The type of the network list; must be either "IP" or "GEO".
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: NetworkListArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Use the `NetworkList` resource to create a network list, or to modify an existing list.
## Example Usage
Basic usage:
```python
import pulumi
import pulumi_akamai as akamai
network_list = akamai.NetworkList("networkList",
type="IP",
description="network list description",
lists=var["list"],
mode="APPEND",
contract_id="ABC-123",
group_id=12345)
```
:param str resource_name: The name of the resource.
:param NetworkListArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(NetworkListArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
contract_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[int]] = None,
lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
mode: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = NetworkListArgs.__new__(NetworkListArgs)
__props__.__dict__["contract_id"] = contract_id
if description is None and not opts.urn:
raise TypeError("Missing required property 'description'")
__props__.__dict__["description"] = description
__props__.__dict__["group_id"] = group_id
__props__.__dict__["lists"] = lists
if mode is None and not opts.urn:
raise TypeError("Missing required property 'mode'")
__props__.__dict__["mode"] = mode
__props__.__dict__["name"] = name
if type is None and not opts.urn:
raise TypeError("Missing required property 'type'")
__props__.__dict__["type"] = type
__props__.__dict__["network_list_id"] = None
__props__.__dict__["sync_point"] = None
__props__.__dict__["uniqueid"] = None
super(NetworkList, __self__).__init__(
'akamai:index/networkList:NetworkList',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
contract_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
group_id: Optional[pulumi.Input[int]] = None,
lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
mode: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_list_id: Optional[pulumi.Input[str]] = None,
sync_point: Optional[pulumi.Input[int]] = None,
type: Optional[pulumi.Input[str]] = None,
uniqueid: Optional[pulumi.Input[str]] = None) -> 'NetworkList':
"""
Get an existing NetworkList resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] contract_id: The contract ID of the network list. If supplied, group_id must also be supplied. The
contract_id value of an existing network list may not be modified.
:param pulumi.Input[str] description: The description to be assigned to the network list.
:param pulumi.Input[int] group_id: The group ID of the network list. If supplied, contract_id must also be supplied. The
group_id value of an existing network list may not be modified.
:param pulumi.Input[Sequence[pulumi.Input[str]]] lists: : (Optional) A list of IP addresses or locations to be included in the list, added to an existing list, or
removed from an existing list.
:param pulumi.Input[str] mode: A string specifying the interpretation of the `list` parameter. Must be one of the following:
:param pulumi.Input[str] name: The name to be assigned to the network list.
:param pulumi.Input[str] network_list_id: The ID of the network list.
:param pulumi.Input[int] sync_point: An integer that identifies the current version of the network list; this value is incremented each time
the list is modified.
:param pulumi.Input[str] type: The type of the network list; must be either "IP" or "GEO".
:param pulumi.Input[str] uniqueid: uniqueId
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _NetworkListState.__new__(_NetworkListState)
__props__.__dict__["contract_id"] = contract_id
__props__.__dict__["description"] = description
__props__.__dict__["group_id"] = group_id
__props__.__dict__["lists"] = lists
__props__.__dict__["mode"] = mode
__props__.__dict__["name"] = name
__props__.__dict__["network_list_id"] = network_list_id
__props__.__dict__["sync_point"] = sync_point
__props__.__dict__["type"] = type
__props__.__dict__["uniqueid"] = uniqueid
return NetworkList(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="contractId")
def contract_id(self) -> pulumi.Output[Optional[str]]:
"""
The contract ID of the network list. If supplied, group_id must also be supplied. The
contract_id value of an existing network list may not be modified.
"""
return pulumi.get(self, "contract_id")
@property
@pulumi.getter
def description(self) -> pulumi.Output[str]:
"""
The description to be assigned to the network list.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="groupId")
def group_id(self) -> pulumi.Output[Optional[int]]:
"""
The group ID of the network list. If supplied, contract_id must also be supplied. The
group_id value of an existing network list may not be modified.
"""
return pulumi.get(self, "group_id")
@property
@pulumi.getter
def lists(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
: (Optional) A list of IP addresses or locations to be included in the list, added to an existing list, or
removed from an existing list.
"""
return pulumi.get(self, "lists")
    @property
    @pulumi.getter
    def mode(self) -> pulumi.Output[str]:
        """
        A string specifying the interpretation of the `list` parameter.

        NOTE(review): the generated docstring is truncated ("must be one of the
        following:") -- consult the provider documentation for the allowed values.
        """
        return pulumi.get(self, "mode")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name to be assigned to the network list.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="networkListId")
    def network_list_id(self) -> pulumi.Output[str]:
        """
        The ID of the network list.
        """
        return pulumi.get(self, "network_list_id")
    @property
    @pulumi.getter(name="syncPoint")
    def sync_point(self) -> pulumi.Output[int]:
        """
        An integer that identifies the current version of the network list; this value is
        incremented each time the list is modified.
        """
        return pulumi.get(self, "sync_point")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        The type of the network list; must be either "IP" or "GEO".
        """
        return pulumi.get(self, "type")
    @property
    @pulumi.getter
    def uniqueid(self) -> pulumi.Output[str]:
        """
        uniqueId

        NOTE(review): presumably the server-assigned unique identifier of the
        network list -- confirm against the provider documentation.
        """
        return pulumi.get(self, "uniqueid")
| 41.659722
| 170
| 0.625729
| 2,995
| 23,996
| 4.839065
| 0.059766
| 0.095632
| 0.08887
| 0.066791
| 0.869109
| 0.837991
| 0.809011
| 0.773684
| 0.765473
| 0.744359
| 0
| 0.000974
| 0.272754
| 23,996
| 575
| 171
| 41.732174
| 0.829523
| 0.358018
| 0
| 0.681529
| 1
| 0
| 0.075397
| 0.002588
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16242
| false
| 0.003185
| 0.015924
| 0
| 0.27707
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e621bd211bc4698551929049b8016f7376ab9d60
| 139,362
|
py
|
Python
|
lib/emq/queue/QueueService.py
|
GeorryHuang/galaxy-sdk-python
|
265d56709b5ea125fbc89e4bfc05e5e8e8d9fce4
|
[
"Apache-2.0"
] | 17
|
2015-06-18T07:17:50.000Z
|
2019-11-20T07:23:33.000Z
|
lib/emq/queue/QueueService.py
|
GeorryHuang/galaxy-sdk-python
|
265d56709b5ea125fbc89e4bfc05e5e8e8d9fce4
|
[
"Apache-2.0"
] | 6
|
2017-06-08T08:37:55.000Z
|
2020-12-30T10:08:49.000Z
|
lib/emq/queue/QueueService.py
|
GeorryHuang/galaxy-sdk-python
|
265d56709b5ea125fbc89e4bfc05e5e8e8d9fce4
|
[
"Apache-2.0"
] | 19
|
2016-09-21T17:34:59.000Z
|
2020-07-18T22:35:15.000Z
|
# encoding: utf-8
#
# Autogenerated by Thrift Compiler (0.9.2)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:new_style
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
import emq.common.EMQBaseService
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface(emq.common.EMQBaseService.Iface):
  """Thrift-generated interface for the EMQ QueueService.

  Every method is a stub (`pass`); Client (below) provides the RPC
  implementation and Processor dispatches incoming calls to a handler
  implementing this interface.  Autogenerated by Thrift 0.9.2 -- the
  method set and signatures must not be hand-edited.
  """
  def createQueue(self, request):
    """
    Create queue;

    Parameters:
     - request
    """
    pass

  def deleteQueue(self, request):
    """
    Delete queue;

    Parameters:
     - request
    """
    pass

  def purgeQueue(self, request):
    """
    Purge queue;

    Parameters:
     - request
    """
    pass

  def setQueueAttribute(self, request):
    """
    Set queue attribute;

    Parameters:
     - request
    """
    pass

  def setQueueQuota(self, request):
    """
    Set queue quota;

    Parameters:
     - request
    """
    pass

  def getQueueInfo(self, request):
    """
    Get queue info, include QueueAttribute, QueueState and QueueQuota;

    Parameters:
     - request
    """
    pass

  def listQueue(self, request):
    """
    List queue with queueNamePrefix;

    Parameters:
     - request
    """
    pass

  def setQueueRedrivePolicy(self, request):
    """
    Set queue redrive policy;
    (generated docstring said "Remove" -- copy/paste artifact, fixed)

    Parameters:
     - request
    """
    pass

  def removeQueueRedrivePolicy(self, request):
    """
    Remove queue redrive policy;

    Parameters:
     - request
    """
    pass

  def setPermission(self, request):
    """
    Set permission for developer
    FULL_CONTROL required to use this method

    Parameters:
     - request
    """
    pass

  def revokePermission(self, request):
    """
    Revoke permission for developer
    FULL_CONTROL required to use this method

    Parameters:
     - request
    """
    pass

  def queryPermission(self, request):
    """
    query permission for developer using this method
    no permission required to use this method

    Parameters:
     - request
    """
    pass

  def queryPermissionForId(self, request):
    """
    List permission for developer
    ADMIN_QUEUE required to use this method

    Parameters:
     - request
    """
    pass

  def listPermissions(self, request):
    """
    list permissions for all users of the queue
    ADMIN_QUEUE required to use this method

    Parameters:
     - request
    """
    pass

  def createTag(self, request):
    """
    create tag for queue
    ADMIN_QUEUE required to use this method

    Parameters:
     - request
    """
    pass

  def deleteTag(self, request):
    """
    delete tag for queue
    ADMIN_QUEUE required to use this method

    Parameters:
     - request
    """
    pass

  def getTagInfo(self, request):
    """
    get info of tag
    ADMIN_QUEUE required to use this method

    Parameters:
     - request
    """
    pass

  def listTag(self, request):
    """
    list names of all tag of queue
    ADMIN_QUEUE required to use this method

    Parameters:
     - request
    """
    pass

  def queryMetric(self, request):
    """
    query metrics
    FULL_CONTROL required to use this method

    Parameters:
     - request
    """
    pass

  def queryPrivilegedQueue(self, request):
    """
    query privileged queues
    No permission required

    Parameters:
     - request
    """
    pass

  def verifyEMQAdmin(self):
    # No docstring in the generated source; takes no request argument.
    pass

  def copyQueue(self, request):
    """
    copy queues using queues meta

    Parameters:
     - request
    """
    pass

  def getQueueMeta(self, queueName):
    """
    Parameters:
     - queueName
    """
    pass
class Client(emq.common.EMQBaseService.Client, Iface):
  """Synchronous Thrift client for the EMQ QueueService.

  Every RPC `foo` is a send_foo() (serialize the foo_args struct onto the
  output protocol and flush) followed by recv_foo() (read the reply,
  re-raise a transported TApplicationException or declared service
  exception, and return result.success when the call has a return value).
  Autogenerated by Thrift 0.9.2 -- do not hand-edit the wire logic.
  """
  def __init__(self, iprot, oprot=None):
    emq.common.EMQBaseService.Client.__init__(self, iprot, oprot)

  def createQueue(self, request):
    """
    Create queue;

    Parameters:
     - request
    """
    self.send_createQueue(request)
    return self.recv_createQueue()

  def send_createQueue(self, request):
    self._oprot.writeMessageBegin('createQueue', TMessageType.CALL, self._seqid)
    args = createQueue_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_createQueue(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Server-side failure outside the declared service exception.
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = createQueue_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      # Declared service exception transported inside the reply struct.
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "createQueue failed: unknown result");

  def deleteQueue(self, request):
    """
    Delete queue;

    Parameters:
     - request
    """
    self.send_deleteQueue(request)
    self.recv_deleteQueue()

  def send_deleteQueue(self, request):
    self._oprot.writeMessageBegin('deleteQueue', TMessageType.CALL, self._seqid)
    args = deleteQueue_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_deleteQueue(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = deleteQueue_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.e is not None:
      raise result.e
    return

  def purgeQueue(self, request):
    """
    Purge queue;

    Parameters:
     - request
    """
    self.send_purgeQueue(request)
    self.recv_purgeQueue()

  def send_purgeQueue(self, request):
    self._oprot.writeMessageBegin('purgeQueue', TMessageType.CALL, self._seqid)
    args = purgeQueue_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_purgeQueue(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = purgeQueue_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.e is not None:
      raise result.e
    return

  def setQueueAttribute(self, request):
    """
    Set queue attribute;

    Parameters:
     - request
    """
    self.send_setQueueAttribute(request)
    return self.recv_setQueueAttribute()

  def send_setQueueAttribute(self, request):
    self._oprot.writeMessageBegin('setQueueAttribute', TMessageType.CALL, self._seqid)
    args = setQueueAttribute_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_setQueueAttribute(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = setQueueAttribute_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "setQueueAttribute failed: unknown result");

  def setQueueQuota(self, request):
    """
    Set queue quota;

    Parameters:
     - request
    """
    self.send_setQueueQuota(request)
    return self.recv_setQueueQuota()

  def send_setQueueQuota(self, request):
    self._oprot.writeMessageBegin('setQueueQuota', TMessageType.CALL, self._seqid)
    args = setQueueQuota_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_setQueueQuota(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = setQueueQuota_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "setQueueQuota failed: unknown result");

  def getQueueInfo(self, request):
    """
    Get queue info, include QueueAttribute, QueueState and QueueQuota;

    Parameters:
     - request
    """
    self.send_getQueueInfo(request)
    return self.recv_getQueueInfo()

  def send_getQueueInfo(self, request):
    self._oprot.writeMessageBegin('getQueueInfo', TMessageType.CALL, self._seqid)
    args = getQueueInfo_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_getQueueInfo(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = getQueueInfo_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getQueueInfo failed: unknown result");

  def listQueue(self, request):
    """
    List queue with queueNamePrefix;

    Parameters:
     - request
    """
    self.send_listQueue(request)
    return self.recv_listQueue()

  def send_listQueue(self, request):
    self._oprot.writeMessageBegin('listQueue', TMessageType.CALL, self._seqid)
    args = listQueue_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_listQueue(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = listQueue_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "listQueue failed: unknown result");

  def setQueueRedrivePolicy(self, request):
    """
    Set queue redrive policy;
    (generated docstring said "Remove" -- copy/paste artifact, fixed)

    Parameters:
     - request
    """
    self.send_setQueueRedrivePolicy(request)
    return self.recv_setQueueRedrivePolicy()

  def send_setQueueRedrivePolicy(self, request):
    self._oprot.writeMessageBegin('setQueueRedrivePolicy', TMessageType.CALL, self._seqid)
    args = setQueueRedrivePolicy_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_setQueueRedrivePolicy(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = setQueueRedrivePolicy_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "setQueueRedrivePolicy failed: unknown result");

  def removeQueueRedrivePolicy(self, request):
    """
    Remove queue redrive policy;

    Parameters:
     - request
    """
    self.send_removeQueueRedrivePolicy(request)
    self.recv_removeQueueRedrivePolicy()

  def send_removeQueueRedrivePolicy(self, request):
    self._oprot.writeMessageBegin('removeQueueRedrivePolicy', TMessageType.CALL, self._seqid)
    args = removeQueueRedrivePolicy_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_removeQueueRedrivePolicy(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = removeQueueRedrivePolicy_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.e is not None:
      raise result.e
    return

  def setPermission(self, request):
    """
    Set permission for developer
    FULL_CONTROL required to use this method

    Parameters:
     - request
    """
    self.send_setPermission(request)
    self.recv_setPermission()

  def send_setPermission(self, request):
    self._oprot.writeMessageBegin('setPermission', TMessageType.CALL, self._seqid)
    args = setPermission_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_setPermission(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = setPermission_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.e is not None:
      raise result.e
    return

  def revokePermission(self, request):
    """
    Revoke permission for developer
    FULL_CONTROL required to use this method

    Parameters:
     - request
    """
    self.send_revokePermission(request)
    self.recv_revokePermission()

  def send_revokePermission(self, request):
    self._oprot.writeMessageBegin('revokePermission', TMessageType.CALL, self._seqid)
    args = revokePermission_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_revokePermission(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = revokePermission_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.e is not None:
      raise result.e
    return

  def queryPermission(self, request):
    """
    query permission for developer using this method
    no permission required to use this method

    Parameters:
     - request
    """
    self.send_queryPermission(request)
    return self.recv_queryPermission()

  def send_queryPermission(self, request):
    self._oprot.writeMessageBegin('queryPermission', TMessageType.CALL, self._seqid)
    args = queryPermission_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_queryPermission(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = queryPermission_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "queryPermission failed: unknown result");

  def queryPermissionForId(self, request):
    """
    List permission for developer
    ADMIN_QUEUE required to use this method

    Parameters:
     - request
    """
    self.send_queryPermissionForId(request)
    return self.recv_queryPermissionForId()

  def send_queryPermissionForId(self, request):
    self._oprot.writeMessageBegin('queryPermissionForId', TMessageType.CALL, self._seqid)
    args = queryPermissionForId_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_queryPermissionForId(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = queryPermissionForId_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "queryPermissionForId failed: unknown result");

  def listPermissions(self, request):
    """
    list permissions for all users of the queue
    ADMIN_QUEUE required to use this method

    Parameters:
     - request
    """
    self.send_listPermissions(request)
    return self.recv_listPermissions()

  def send_listPermissions(self, request):
    self._oprot.writeMessageBegin('listPermissions', TMessageType.CALL, self._seqid)
    args = listPermissions_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_listPermissions(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = listPermissions_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "listPermissions failed: unknown result");

  def createTag(self, request):
    """
    create tag for queue
    ADMIN_QUEUE required to use this method

    Parameters:
     - request
    """
    self.send_createTag(request)
    return self.recv_createTag()

  def send_createTag(self, request):
    self._oprot.writeMessageBegin('createTag', TMessageType.CALL, self._seqid)
    args = createTag_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_createTag(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = createTag_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "createTag failed: unknown result");

  def deleteTag(self, request):
    """
    delete tag for queue
    ADMIN_QUEUE required to use this method

    Parameters:
     - request
    """
    self.send_deleteTag(request)
    self.recv_deleteTag()

  def send_deleteTag(self, request):
    self._oprot.writeMessageBegin('deleteTag', TMessageType.CALL, self._seqid)
    args = deleteTag_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_deleteTag(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = deleteTag_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.e is not None:
      raise result.e
    return

  def getTagInfo(self, request):
    """
    get info of tag
    ADMIN_QUEUE required to use this method

    Parameters:
     - request
    """
    self.send_getTagInfo(request)
    return self.recv_getTagInfo()

  def send_getTagInfo(self, request):
    self._oprot.writeMessageBegin('getTagInfo', TMessageType.CALL, self._seqid)
    args = getTagInfo_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_getTagInfo(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = getTagInfo_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getTagInfo failed: unknown result");

  def listTag(self, request):
    """
    list names of all tag of queue
    ADMIN_QUEUE required to use this method

    Parameters:
     - request
    """
    self.send_listTag(request)
    return self.recv_listTag()

  def send_listTag(self, request):
    self._oprot.writeMessageBegin('listTag', TMessageType.CALL, self._seqid)
    args = listTag_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_listTag(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = listTag_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "listTag failed: unknown result");

  def queryMetric(self, request):
    """
    query metrics
    FULL_CONTROL required to use this method

    Parameters:
     - request
    """
    self.send_queryMetric(request)
    return self.recv_queryMetric()

  def send_queryMetric(self, request):
    self._oprot.writeMessageBegin('queryMetric', TMessageType.CALL, self._seqid)
    args = queryMetric_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_queryMetric(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = queryMetric_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "queryMetric failed: unknown result");

  def queryPrivilegedQueue(self, request):
    """
    query privileged queues
    No permission required

    Parameters:
     - request
    """
    self.send_queryPrivilegedQueue(request)
    return self.recv_queryPrivilegedQueue()

  def send_queryPrivilegedQueue(self, request):
    self._oprot.writeMessageBegin('queryPrivilegedQueue', TMessageType.CALL, self._seqid)
    args = queryPrivilegedQueue_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_queryPrivilegedQueue(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = queryPrivilegedQueue_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "queryPrivilegedQueue failed: unknown result");

  def verifyEMQAdmin(self):
    # No request argument; returns the service's verification result.
    self.send_verifyEMQAdmin()
    return self.recv_verifyEMQAdmin()

  def send_verifyEMQAdmin(self):
    self._oprot.writeMessageBegin('verifyEMQAdmin', TMessageType.CALL, self._seqid)
    args = verifyEMQAdmin_args()
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_verifyEMQAdmin(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = verifyEMQAdmin_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "verifyEMQAdmin failed: unknown result");

  def copyQueue(self, request):
    """
    copy queues using queues meta

    Parameters:
     - request
    """
    self.send_copyQueue(request)
    self.recv_copyQueue()

  def send_copyQueue(self, request):
    self._oprot.writeMessageBegin('copyQueue', TMessageType.CALL, self._seqid)
    args = copyQueue_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_copyQueue(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = copyQueue_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.e is not None:
      raise result.e
    return

  def getQueueMeta(self, queueName):
    """
    Parameters:
     - queueName
    """
    self.send_getQueueMeta(queueName)
    return self.recv_getQueueMeta()

  def send_getQueueMeta(self, queueName):
    self._oprot.writeMessageBegin('getQueueMeta', TMessageType.CALL, self._seqid)
    args = getQueueMeta_args()
    args.queueName = queueName
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_getQueueMeta(self):
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(iprot)
      iprot.readMessageEnd()
      raise x
    result = getQueueMeta_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getQueueMeta failed: unknown result");
class Processor(emq.common.EMQBaseService.Processor, Iface, TProcessor):
  def __init__(self, handler):
    # Register every QueueService RPC in the dispatch table consumed by
    # process(); the string keys must match the wire method names exactly.
    emq.common.EMQBaseService.Processor.__init__(self, handler)
    self._processMap["createQueue"] = Processor.process_createQueue
    self._processMap["deleteQueue"] = Processor.process_deleteQueue
    self._processMap["purgeQueue"] = Processor.process_purgeQueue
    self._processMap["setQueueAttribute"] = Processor.process_setQueueAttribute
    self._processMap["setQueueQuota"] = Processor.process_setQueueQuota
    self._processMap["getQueueInfo"] = Processor.process_getQueueInfo
    self._processMap["listQueue"] = Processor.process_listQueue
    self._processMap["setQueueRedrivePolicy"] = Processor.process_setQueueRedrivePolicy
    self._processMap["removeQueueRedrivePolicy"] = Processor.process_removeQueueRedrivePolicy
    self._processMap["setPermission"] = Processor.process_setPermission
    self._processMap["revokePermission"] = Processor.process_revokePermission
    self._processMap["queryPermission"] = Processor.process_queryPermission
    self._processMap["queryPermissionForId"] = Processor.process_queryPermissionForId
    self._processMap["listPermissions"] = Processor.process_listPermissions
    self._processMap["createTag"] = Processor.process_createTag
    self._processMap["deleteTag"] = Processor.process_deleteTag
    self._processMap["getTagInfo"] = Processor.process_getTagInfo
    self._processMap["listTag"] = Processor.process_listTag
    self._processMap["queryMetric"] = Processor.process_queryMetric
    self._processMap["queryPrivilegedQueue"] = Processor.process_queryPrivilegedQueue
    self._processMap["verifyEMQAdmin"] = Processor.process_verifyEMQAdmin
    self._processMap["copyQueue"] = Processor.process_copyQueue
    self._processMap["getQueueMeta"] = Processor.process_getQueueMeta
  def process(self, iprot, oprot):
    # Read one incoming message and dispatch by method name.  An unknown
    # name is skipped and answered with an UNKNOWN_METHOD application
    # exception (returns None); a dispatched call returns True.
    # ('type' shadows the builtin -- generated code, kept as-is.)
    (name, type, seqid) = iprot.readMessageBegin()
    if name not in self._processMap:
      iprot.skip(TType.STRUCT)
      iprot.readMessageEnd()
      x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
      oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
      x.write(oprot)
      oprot.writeMessageEnd()
      oprot.trans.flush()
      return
    else:
      self._processMap[name](self, seqid, iprot, oprot)
    return True
  def process_createQueue(self, seqid, iprot, oprot):
    # Decode createQueue args, invoke the handler, and reply with either
    # the result or the declared service exception.  (Python 2 except
    # syntax -- generated code, kept as-is.)
    args = createQueue_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = createQueue_result()
    try:
      result.success = self._handler.createQueue(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException, e:
      result.e = e
    oprot.writeMessageBegin("createQueue", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_deleteQueue(self, seqid, iprot, oprot):
    # Void RPC: invoke the handler and reply with an empty result struct
    # (or the declared service exception).
    args = deleteQueue_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = deleteQueue_result()
    try:
      self._handler.deleteQueue(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException, e:
      result.e = e
    oprot.writeMessageBegin("deleteQueue", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_purgeQueue(self, seqid, iprot, oprot):
    # Void RPC: invoke the handler and reply with an empty result struct
    # (or the declared service exception).
    args = purgeQueue_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = purgeQueue_result()
    try:
      self._handler.purgeQueue(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException, e:
      result.e = e
    oprot.writeMessageBegin("purgeQueue", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_setQueueAttribute(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, reply with result or service exception.
    args = setQueueAttribute_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = setQueueAttribute_result()
    try:
      result.success = self._handler.setQueueAttribute(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException, e:
      result.e = e
    oprot.writeMessageBegin("setQueueAttribute", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_setQueueQuota(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, reply with result or service exception.
    args = setQueueQuota_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = setQueueQuota_result()
    try:
      result.success = self._handler.setQueueQuota(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException, e:
      result.e = e
    oprot.writeMessageBegin("setQueueQuota", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_getQueueInfo(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, reply with result or service exception.
    args = getQueueInfo_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getQueueInfo_result()
    try:
      result.success = self._handler.getQueueInfo(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException, e:
      result.e = e
    oprot.writeMessageBegin("getQueueInfo", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_listQueue(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, reply with result or service exception.
    args = listQueue_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = listQueue_result()
    try:
      result.success = self._handler.listQueue(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException, e:
      result.e = e
    oprot.writeMessageBegin("listQueue", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_setQueueRedrivePolicy(self, seqid, iprot, oprot):
    # Decode args, invoke the handler, reply with result or service exception.
    args = setQueueRedrivePolicy_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = setQueueRedrivePolicy_result()
    try:
      result.success = self._handler.setQueueRedrivePolicy(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException, e:
      result.e = e
    oprot.writeMessageBegin("setQueueRedrivePolicy", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_removeQueueRedrivePolicy(self, seqid, iprot, oprot):
    # Void RPC: invoke the handler and reply with an empty result struct
    # (or the declared service exception).
    args = removeQueueRedrivePolicy_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = removeQueueRedrivePolicy_result()
    try:
      self._handler.removeQueueRedrivePolicy(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException, e:
      result.e = e
    oprot.writeMessageBegin("removeQueueRedrivePolicy", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_setPermission(self, seqid, iprot, oprot):
    # Void RPC: invoke the handler and reply with an empty result struct
    # (or the declared service exception).
    args = setPermission_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = setPermission_result()
    try:
      self._handler.setPermission(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException, e:
      result.e = e
    oprot.writeMessageBegin("setPermission", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
  def process_revokePermission(self, seqid, iprot, oprot):
    # Void RPC: invoke the handler and reply with an empty result struct
    # (or the declared service exception).
    args = revokePermission_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = revokePermission_result()
    try:
      self._handler.revokePermission(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException, e:
      result.e = e
    oprot.writeMessageBegin("revokePermission", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_queryPermission(self, seqid, iprot, oprot):
    """Decode a queryPermission RPC, invoke the handler, and send the reply."""
    args = queryPermission_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = queryPermission_result()
    try:
        result.success = self._handler.queryPermission(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException as e:
        # Service exceptions are returned to the client in the result struct.
        result.e = e
    oprot.writeMessageBegin("queryPermission", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_queryPermissionForId(self, seqid, iprot, oprot):
    """Decode a queryPermissionForId RPC, invoke the handler, and send the reply."""
    args = queryPermissionForId_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = queryPermissionForId_result()
    try:
        result.success = self._handler.queryPermissionForId(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException as e:
        result.e = e
    oprot.writeMessageBegin("queryPermissionForId", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_listPermissions(self, seqid, iprot, oprot):
    """Decode a listPermissions RPC, invoke the handler, and send the reply."""
    args = listPermissions_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = listPermissions_result()
    try:
        result.success = self._handler.listPermissions(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException as e:
        result.e = e
    oprot.writeMessageBegin("listPermissions", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_createTag(self, seqid, iprot, oprot):
    """Decode a createTag RPC, invoke the handler, and send the reply."""
    args = createTag_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = createTag_result()
    try:
        result.success = self._handler.createTag(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException as e:
        result.e = e
    oprot.writeMessageBegin("createTag", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_deleteTag(self, seqid, iprot, oprot):
    """Decode a deleteTag RPC, invoke the handler, and send the reply.

    Void call: only a service exception (if any) travels back to the client.
    """
    args = deleteTag_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = deleteTag_result()
    try:
        self._handler.deleteTag(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException as e:
        result.e = e
    oprot.writeMessageBegin("deleteTag", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getTagInfo(self, seqid, iprot, oprot):
    """Decode a getTagInfo RPC, invoke the handler, and send the reply."""
    args = getTagInfo_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getTagInfo_result()
    try:
        result.success = self._handler.getTagInfo(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException as e:
        result.e = e
    oprot.writeMessageBegin("getTagInfo", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_listTag(self, seqid, iprot, oprot):
    """Decode a listTag RPC, invoke the handler, and send the reply."""
    args = listTag_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = listTag_result()
    try:
        result.success = self._handler.listTag(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException as e:
        result.e = e
    oprot.writeMessageBegin("listTag", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_queryMetric(self, seqid, iprot, oprot):
    """Decode a queryMetric RPC, invoke the handler, and send the reply."""
    args = queryMetric_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = queryMetric_result()
    try:
        result.success = self._handler.queryMetric(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException as e:
        result.e = e
    oprot.writeMessageBegin("queryMetric", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_queryPrivilegedQueue(self, seqid, iprot, oprot):
    """Decode a queryPrivilegedQueue RPC, invoke the handler, and send the reply."""
    args = queryPrivilegedQueue_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = queryPrivilegedQueue_result()
    try:
        result.success = self._handler.queryPrivilegedQueue(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException as e:
        result.e = e
    oprot.writeMessageBegin("queryPrivilegedQueue", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_verifyEMQAdmin(self, seqid, iprot, oprot):
    """Decode a verifyEMQAdmin RPC, invoke the handler, and send the reply.

    This call takes no arguments; the args struct is still read to consume
    the request frame off the wire.
    """
    args = verifyEMQAdmin_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = verifyEMQAdmin_result()
    try:
        result.success = self._handler.verifyEMQAdmin()
    except emq.common.ttypes.GalaxyEmqServiceException as e:
        result.e = e
    oprot.writeMessageBegin("verifyEMQAdmin", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_copyQueue(self, seqid, iprot, oprot):
    """Decode a copyQueue RPC, invoke the handler, and send the reply.

    Void call: only a service exception (if any) travels back to the client.
    """
    args = copyQueue_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = copyQueue_result()
    try:
        self._handler.copyQueue(args.request)
    except emq.common.ttypes.GalaxyEmqServiceException as e:
        result.e = e
    oprot.writeMessageBegin("copyQueue", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getQueueMeta(self, seqid, iprot, oprot):
    """Decode a getQueueMeta RPC, invoke the handler, and send the reply.

    Unlike the other methods here, the wire argument is a bare queue name
    (args.queueName), not a request struct.
    """
    args = getQueueMeta_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getQueueMeta_result()
    try:
        result.success = self._handler.getQueueMeta(args.queueName)
    except emq.common.ttypes.GalaxyEmqServiceException as e:
        result.e = e
    oprot.writeMessageBegin("getQueueMeta", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class createQueue_args(object):
    """Thrift argument wrapper for the createQueue call.

    Attributes:
     - request: CreateQueueRequest payload (field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (CreateQueueRequest, CreateQueueRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decode when the accelerated binary protocol
        # and a C-readable transport are available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = CreateQueueRequest()
                    self.request.read(iprot)
                else:
                    # Type mismatch: skip the unexpected payload.
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('createQueue_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class createQueue_result(object):
    """Thrift result wrapper for the createQueue call.

    Attributes:
     - success: CreateQueueResponse on success (field id 0)
     - e: GalaxyEmqServiceException on service failure (field id 1)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (CreateQueueResponse, CreateQueueResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = CreateQueueResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('createQueue_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class deleteQueue_args(object):
    """Thrift argument wrapper for the deleteQueue call.

    Attributes:
     - request: DeleteQueueRequest payload (field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (DeleteQueueRequest, DeleteQueueRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = DeleteQueueRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('deleteQueue_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class deleteQueue_result(object):
    """Thrift result wrapper for the (void) deleteQueue call.

    Attributes:
     - e: GalaxyEmqServiceException on service failure (field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('deleteQueue_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class purgeQueue_args(object):
    """Thrift argument wrapper for the purgeQueue call.

    Attributes:
     - request: PurgeQueueRequest payload (field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (PurgeQueueRequest, PurgeQueueRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = PurgeQueueRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('purgeQueue_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class purgeQueue_result(object):
    """Thrift result wrapper for the (void) purgeQueue call.

    Attributes:
     - e: GalaxyEmqServiceException on service failure (field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('purgeQueue_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class setQueueAttribute_args(object):
    """Thrift argument wrapper for the setQueueAttribute call.

    Attributes:
     - request: SetQueueAttributesRequest payload (field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (SetQueueAttributesRequest, SetQueueAttributesRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = SetQueueAttributesRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('setQueueAttribute_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class setQueueAttribute_result(object):
    """Thrift result wrapper for the setQueueAttribute call.

    Attributes:
     - success: SetQueueAttributesResponse on success (field id 0)
     - e: GalaxyEmqServiceException on service failure (field id 1)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (SetQueueAttributesResponse, SetQueueAttributesResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = SetQueueAttributesResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('setQueueAttribute_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class setQueueQuota_args(object):
    """Thrift argument wrapper for the setQueueQuota call.

    Attributes:
     - request: SetQueueQuotaRequest payload (field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (SetQueueQuotaRequest, SetQueueQuotaRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = SetQueueQuotaRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('setQueueQuota_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class setQueueQuota_result(object):
    """Thrift result wrapper for the setQueueQuota call.

    Attributes:
     - success: SetQueueQuotaResponse on success (field id 0)
     - e: GalaxyEmqServiceException on service failure (field id 1)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (SetQueueQuotaResponse, SetQueueQuotaResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = SetQueueQuotaResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('setQueueQuota_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getQueueInfo_args(object):
    """Thrift argument wrapper for the getQueueInfo call.

    Attributes:
     - request: GetQueueInfoRequest payload (field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (GetQueueInfoRequest, GetQueueInfoRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = GetQueueInfoRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getQueueInfo_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getQueueInfo_result(object):
    """Thrift result wrapper for the getQueueInfo call.

    Attributes:
     - success: GetQueueInfoResponse on success (field id 0)
     - e: GalaxyEmqServiceException on service failure (field id 1)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (GetQueueInfoResponse, GetQueueInfoResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = GetQueueInfoResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getQueueInfo_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class listQueue_args(object):
    """Thrift argument wrapper for the listQueue call.

    Attributes:
     - request: ListQueueRequest payload (field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (ListQueueRequest, ListQueueRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = ListQueueRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listQueue_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class listQueue_result(object):
    """Thrift result wrapper for the listQueue call.

    Attributes:
     - success: ListQueueResponse on success (field id 0)
     - e: GalaxyEmqServiceException on service failure (field id 1)
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (ListQueueResponse, ListQueueResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = ListQueueResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listQueue_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class setQueueRedrivePolicy_args(object):
    """Thrift argument wrapper for the setQueueRedrivePolicy call.

    Attributes:
     - request: SetQueueRedrivePolicyRequest payload (field id 1)
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (SetQueueRedrivePolicyRequest, SetQueueRedrivePolicyRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = SetQueueRedrivePolicyRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*; unset (None) fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('setQueueRedrivePolicy_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # .items() works on both Python 2 and 3 (the generated .iteritems() is 2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class setQueueRedrivePolicy_result(object):
    """
    Attributes:
     - success
     - e
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        (0, TType.STRUCT, 'success', (SetQueueRedrivePolicyResponse, SetQueueRedrivePolicyResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = SetQueueRedrivePolicyResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('setQueueRedrivePolicy_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class removeQueueRedrivePolicy_args(object):
    """
    Attributes:
     - request
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (RemoveQueueRedrivePolicyRequest, RemoveQueueRedrivePolicyRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = RemoveQueueRedrivePolicyRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('removeQueueRedrivePolicy_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class removeQueueRedrivePolicy_result(object):
    """
    Attributes:
     - e
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('removeQueueRedrivePolicy_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class setPermission_args(object):
    """
    Attributes:
     - request
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (SetPermissionRequest, SetPermissionRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = SetPermissionRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('setPermission_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class setPermission_result(object):
    """
    Attributes:
     - e
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('setPermission_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class revokePermission_args(object):
    """
    Attributes:
     - request
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (RevokePermissionRequest, RevokePermissionRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = RevokePermissionRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('revokePermission_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class revokePermission_result(object):
    """
    Attributes:
     - e
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('revokePermission_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class queryPermission_args(object):
    """
    Attributes:
     - request
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (QueryPermissionRequest, QueryPermissionRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = QueryPermissionRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('queryPermission_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class queryPermission_result(object):
    """
    Attributes:
     - success
     - e
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        (0, TType.STRUCT, 'success', (QueryPermissionResponse, QueryPermissionResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = QueryPermissionResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('queryPermission_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class queryPermissionForId_args(object):
    """
    Attributes:
     - request
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (QueryPermissionForIdRequest, QueryPermissionForIdRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = QueryPermissionForIdRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('queryPermissionForId_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class queryPermissionForId_result(object):
    """
    Attributes:
     - success
     - e
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        (0, TType.STRUCT, 'success', (QueryPermissionForIdResponse, QueryPermissionForIdResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = QueryPermissionForIdResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('queryPermissionForId_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class listPermissions_args(object):
    """
    Attributes:
     - request
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (ListPermissionsRequest, ListPermissionsRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = ListPermissionsRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listPermissions_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class listPermissions_result(object):
    """
    Attributes:
     - success
     - e
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        (0, TType.STRUCT, 'success', (ListPermissionsResponse, ListPermissionsResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = ListPermissionsResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listPermissions_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class createTag_args(object):
    """
    Attributes:
     - request
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (CreateTagRequest, CreateTagRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = CreateTagRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('createTag_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class createTag_result(object):
    """
    Attributes:
     - success
     - e
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        (0, TType.STRUCT, 'success', (CreateTagResponse, CreateTagResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = CreateTagResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('createTag_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class deleteTag_args(object):
    """
    Attributes:
     - request
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (DeleteTagRequest, DeleteTagRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = DeleteTagRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('deleteTag_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class deleteTag_result(object):
    """
    Attributes:
     - e
    """

    # Field metadata consumed by the accelerated C codec (fastbinary).
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        """Deserialize this struct from *iprot*, preferring the C fast path."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)  # wrong wire type: ignore the field
            else:
                iprot.skip(ftype)  # unknown field id: skip for forward compat
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*, preferring the C fast path."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('deleteTag_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # FIX: items() instead of Python-2-only iteritems() (removed in Py3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getTagInfo_args(object):
    """Thrift argument wrapper for the getTagInfo RPC.

    Attributes:
     - request: GetTagInfoRequest payload sent to the service.
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (GetTagInfoRequest, GetTagInfoRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated binary deserialization when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = GetTagInfoRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer servers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary serialization when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getTagInfo_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # .items() instead of Py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getTagInfo_result(object):
    """Thrift result wrapper for the getTagInfo RPC.

    Attributes:
     - success: GetTagInfoResponse returned on success.
     - e: GalaxyEmqServiceException raised by the service on failure.
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (GetTagInfoResponse, GetTagInfoResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        # Fast path: C-accelerated binary deserialization when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = GetTagInfoResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer servers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary serialization when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getTagInfo_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() instead of Py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class listTag_args(object):
    """Thrift argument wrapper for the listTag RPC.

    Attributes:
     - request: ListTagRequest payload sent to the service.
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (ListTagRequest, ListTagRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated binary deserialization when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = ListTagRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer servers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary serialization when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listTag_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # .items() instead of Py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class listTag_result(object):
    """Thrift result wrapper for the listTag RPC.

    Attributes:
     - success: ListTagResponse returned on success.
     - e: GalaxyEmqServiceException raised by the service on failure.
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (ListTagResponse, ListTagResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        # Fast path: C-accelerated binary deserialization when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = ListTagResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer servers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary serialization when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listTag_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() instead of Py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class queryMetric_args(object):
    """Thrift argument wrapper for the queryMetric RPC.

    Attributes:
     - request: QueryMetricRequest payload sent to the service.
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (QueryMetricRequest, QueryMetricRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated binary deserialization when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = QueryMetricRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer servers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary serialization when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('queryMetric_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # .items() instead of Py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class queryMetric_result(object):
    """Thrift result wrapper for the queryMetric RPC.

    Attributes:
     - success: TimeSeriesData returned on success.
     - e: GalaxyEmqServiceException raised by the service on failure.
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (TimeSeriesData, TimeSeriesData.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        # Fast path: C-accelerated binary deserialization when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TimeSeriesData()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer servers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary serialization when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('queryMetric_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() instead of Py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class queryPrivilegedQueue_args(object):
    """Thrift argument wrapper for the queryPrivilegedQueue RPC.

    Attributes:
     - request: QueryPrivilegedQueueRequest payload sent to the service.
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (QueryPrivilegedQueueRequest, QueryPrivilegedQueueRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated binary deserialization when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = QueryPrivilegedQueueRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer servers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary serialization when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('queryPrivilegedQueue_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # .items() instead of Py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class queryPrivilegedQueue_result(object):
    """Thrift result wrapper for the queryPrivilegedQueue RPC.

    Attributes:
     - success: QueryPrivilegedQueueResponse returned on success.
     - e: GalaxyEmqServiceException raised by the service on failure.
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (QueryPrivilegedQueueResponse, QueryPrivilegedQueueResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        # Fast path: C-accelerated binary deserialization when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = QueryPrivilegedQueueResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer servers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary serialization when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('queryPrivilegedQueue_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() instead of Py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class verifyEMQAdmin_args(object):
    """Thrift argument wrapper for the verifyEMQAdmin RPC (takes no arguments)."""

    thrift_spec = (
    )

    def read(self, iprot):
        # Fast path: C-accelerated binary deserialization when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No declared fields: skip anything the peer sent.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary serialization when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('verifyEMQAdmin_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        return value

    def __repr__(self):
        # .items() instead of Py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class verifyEMQAdmin_result(object):
    """Thrift result wrapper for the verifyEMQAdmin RPC.

    Attributes:
     - success: VerifyEMQAdminResponse returned on success.
     - e: GalaxyEmqServiceException raised by the service on failure.
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (VerifyEMQAdminResponse, VerifyEMQAdminResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        # Fast path: C-accelerated binary deserialization when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = VerifyEMQAdminResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer servers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary serialization when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('verifyEMQAdmin_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() instead of Py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class copyQueue_args(object):
    """Thrift argument wrapper for the copyQueue RPC.

    Attributes:
     - request: CopyQueueRequest payload sent to the service.
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'request', (CopyQueueRequest, CopyQueueRequest.thrift_spec), None, ),  # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        # Fast path: C-accelerated binary deserialization when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = CopyQueueRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer servers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary serialization when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('copyQueue_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.request)
        return value

    def __repr__(self):
        # .items() instead of Py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class copyQueue_result(object):
    """Thrift result wrapper for the copyQueue RPC (void return).

    Attributes:
     - e: GalaxyEmqServiceException raised by the service on failure.
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, e=None,):
        self.e = e

    def read(self, iprot):
        # Fast path: C-accelerated binary deserialization when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer servers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary serialization when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('copyQueue_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() instead of Py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getQueueMeta_args(object):
    """Thrift argument wrapper for the getQueueMeta RPC.

    Attributes:
     - queueName: name of the queue whose metadata is requested.
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'queueName', None, None, ),  # 1
    )

    def __init__(self, queueName=None,):
        self.queueName = queueName

    def read(self, iprot):
        # Fast path: C-accelerated binary deserialization when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.queueName = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer servers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary serialization when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getQueueMeta_args')
        if self.queueName is not None:
            oprot.writeFieldBegin('queueName', TType.STRING, 1)
            oprot.writeString(self.queueName)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.queueName)
        return value

    def __repr__(self):
        # .items() instead of Py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getQueueMeta_result(object):
    """Thrift result wrapper for the getQueueMeta RPC.

    Attributes:
     - success: GetQueueMetaResponse returned on success.
     - e: GalaxyEmqServiceException raised by the service on failure.
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (GetQueueMetaResponse, GetQueueMetaResponse.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'e', (emq.common.ttypes.GalaxyEmqServiceException, emq.common.ttypes.GalaxyEmqServiceException.thrift_spec), None, ),  # 1
    )

    def __init__(self, success=None, e=None,):
        self.success = success
        self.e = e

    def read(self, iprot):
        # Fast path: C-accelerated binary deserialization when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = GetQueueMetaResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = emq.common.ttypes.GalaxyEmqServiceException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer servers stay compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated binary serialization when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getQueueMeta_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        value = (value * 31) ^ hash(self.e)
        return value

    def __repr__(self):
        # .items() instead of Py2-only .iteritems() so repr works on Python 3 too.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
| 29.925274
| 188
| 0.671309
| 15,729
| 139,362
| 5.712823
| 0.016085
| 0.032274
| 0.028445
| 0.021434
| 0.887633
| 0.864774
| 0.857652
| 0.8464
| 0.842939
| 0.840124
| 0
| 0.004487
| 0.214793
| 139,362
| 4,656
| 189
| 29.931701
| 0.816665
| 0.002605
| 0
| 0.865868
| 1
| 0
| 0.027058
| 0.003977
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.006564
| 0.001998
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
052bf1db09ea1105a0395290afe59e32c45209c7
| 2,863
|
py
|
Python
|
allure-pytest/test/status/xfail_call_status_test.py
|
vdsbenoit/allure-python
|
7b56b031c42369dd73844105382e9ceb9a88d6cd
|
[
"Apache-2.0"
] | 1
|
2021-02-19T21:00:11.000Z
|
2021-02-19T21:00:11.000Z
|
allure-pytest/test/status/xfail_call_status_test.py
|
vdsbenoit/allure-python
|
7b56b031c42369dd73844105382e9ceb9a88d6cd
|
[
"Apache-2.0"
] | null | null | null |
allure-pytest/test/status/xfail_call_status_test.py
|
vdsbenoit/allure-python
|
7b56b031c42369dd73844105382e9ceb9a88d6cd
|
[
"Apache-2.0"
] | 1
|
2020-08-05T05:40:44.000Z
|
2020-08-05T05:40:44.000Z
|
import pytest
@pytest.mark.xfail()
def test_xfail():
    # Fails on purpose; the doctest below checks the allure report entry.
    """
    >>> allure_report = getfixture('allure_report')
    >>> assert_that(allure_report,
    ...             has_test_case('test_xfail',
    ...                           with_status('skipped'),
    ...                           has_status_details(with_message_contains("AssertionError"),
    ...                                             with_trace_contains("def test_xfail():")
    ...                                             )
    ...                           )
    ...             )
    """
    assert False
@pytest.mark.xfail(raises=AssertionError)
def test_xfail_raise_mentioned_exception():
    # Raises the declared exception; expected allure status is 'skipped'.
    """
    >>> allure_report = getfixture('allure_report')
    >>> assert_that(allure_report,
    ...             has_test_case('test_xfail_raise_mentioned_exception',
    ...                           with_status('skipped'),
    ...                           has_status_details(with_message_contains("AssertionError"),
    ...                                             with_trace_contains("def test_xfail_raise_mentioned_exception():")
    ...                                             )
    ...                           )
    ...             )
    """
    assert False
@pytest.mark.xfail(raises=AssertionError)
def test_xfail_raise_not_mentioned_exception():
    # Raises a different exception than declared; expected allure status is 'broken'.
    """
    >>> allure_report = getfixture('allure_report')
    >>> assert_that(allure_report,
    ...             has_test_case('test_xfail_raise_not_mentioned_exception',
    ...                           with_status('broken'),
    ...                           has_status_details(with_message_contains("ZeroDivisionError"),
    ...                                             with_trace_contains("def test_xfail_raise_not_mentioned_exception():")
    ...                                             )
    ...                           )
    ...             )
    """
    raise ZeroDivisionError
@pytest.mark.xfail(raises=AssertionError)
def test_xfail_do_not_raise_mentioned_exception():
    # Passes despite xfail marker; expected allure status is 'passed' with XPASS.
    """
    >>> allure_report = getfixture('allure_report')
    >>> assert_that(allure_report,
    ...             has_test_case('test_xfail_do_not_raise_mentioned_exception',
    ...                           with_status('passed'),
    ...                           has_status_details(with_message_contains("XPASS"),
    ...                                             )
    ...                           )
    ...             )
    """
    pass
@pytest.mark.xfail(raises=AssertionError, reason='Some reason')
def test_xfail_with_reason_do_not_raise_mentioned_exception():
    # Passes despite xfail-with-reason marker; XPASS message carries the reason.
    """
    >>> allure_report = getfixture('allure_report')
    >>> assert_that(allure_report,
    ...             has_test_case('test_xfail_with_reason_do_not_raise_mentioned_exception',
    ...                           with_status('passed'),
    ...                           has_status_details(with_message_contains("XPASS Some reason"),
    ...                                             )
    ...                           )
    ...             )
    """
    pass
| 34.493976
| 123
| 0.517639
| 236
| 2,863
| 5.775424
| 0.148305
| 0.132062
| 0.070433
| 0.102715
| 0.903888
| 0.865004
| 0.810712
| 0.76449
| 0.730007
| 0.711665
| 0
| 0
| 0.359064
| 2,863
| 82
| 124
| 34.914634
| 0.742779
| 0.722319
| 0
| 0.4375
| 0
| 0
| 0.019504
| 0
| 0
| 0
| 0
| 0
| 0.375
| 1
| 0.3125
| true
| 0.125
| 0.0625
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
056ddd11cd92ff7ec54ff18dea81bf82b6361dbc
| 40,763
|
py
|
Python
|
Zero-Gen/paininass.py
|
sahraoui741/Zero-attacker
|
aeb8ceb444204090f9c2e3642da4f9a0612eb31d
|
[
"MIT"
] | 182
|
2021-10-08T19:55:24.000Z
|
2022-03-31T08:51:48.000Z
|
Zero-Gen/paininass.py
|
sahraoui741/Zero-attacker
|
aeb8ceb444204090f9c2e3642da4f9a0612eb31d
|
[
"MIT"
] | 16
|
2021-10-08T17:46:27.000Z
|
2022-03-31T10:09:56.000Z
|
Zero-Gen/paininass.py
|
sahraoui741/Zero-attacker
|
aeb8ceb444204090f9c2e3642da4f9a0612eb31d
|
[
"MIT"
] | 66
|
2021-10-09T01:35:56.000Z
|
2022-03-26T20:40:17.000Z
|
from pytransform import pyarmor_runtime
pyarmor_runtime()
__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x09\x00\x61\x0d\x0d\x0a\x08\x2d\xa0\x01\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\x76\x27\x00\x00\x00\x00\x00\x18\xac\x30\x57\xe2\x90\x99\xa9\x80\xf2\x3f\x2b\x79\x57\x0c\x4b\xa1\x00\x00\x00\x00\x00\x00\x00\x00\x35\x23\xff\x01\xff\xdc\x7e\x03\x0b\xc7\x6e\x51\xc4\xee\xab\xa9\x77\xd1\xec\x18\xdb\xe3\xb9\xea\x74\x17\xe0\xf3\x2c\xcb\xdf\x80\x89\xa8\xd9\x2d\x4c\x25\xd6\x2c\x0d\x00\x7b\xb6\x2e\x35\x82\xd5\xe9\xc2\x3f\xdd\x97\x9c\x85\x71\x08\x0a\xab\x8f\xa9\x0f\x65\x8b\x87\xa8\x39\xac\x5e\xf1\x49\x62\x3a\xd1\xf8\xc8\x63\x60\x5d\x51\x29\x99\x19\xca\xfb\x4e\xab\x79\x45\x63\x53\xb7\x44\xf1\xb7\xb7\x8d\xb4\x83\x7a\x2d\x55\x38\x9e\xf0\x25\xf7\x41\xef\x9f\x88\xac\x9c\x24\x00\xf5\xa9\xce\xe9\x01\xac\xf2\xa7\xd3\x25\xa6\x98\x4d\x97\x9a\x11\x7a\x00\xb5\xb6\xf9\x9b\xcc\xe2\xda\x77\x50\x0b\xdd\x4b\xd3\x33\x85\x33\xc4\x1f\x2e\xe0\xc5\x9f\xad\xb6\x93\x17\xa0\x58\x8b\xf9\xc3\xf7\xc9\xfa\xac\x53\xc1\x33\x9e\xb9\x28\xea\xe0\x73\x7c\x6d\x24\x1a\x28\x9f\x1b\x7c\x77\xc7\x6d\x6f\x39\xf7\x17\xd2\xa7\x99\xe5\xca\x81\x84\x86\x65\x44\x56\x0e\x45\x11\xc7\xa8\x4a\xb9\x20\x1a\x23\x63\x59\x72\x79\xf2\x88\x2b\xa2\xeb\xc8\x68\x08\xd5\xc5\xe3\x3b\x15\x6d\xf6\x42\xd2\x76\xc5\xeb\x37\x4a\xae\x98\x9a\x42\xbf\x42\x36\x58\xcc\xf5\xe1\xe3\xe2\x1c\x61\xc6\x51\x6c\x6b\x27\x1a\x47\xab\x6e\x9d\x0a\x20\xd3\x3f\x1f\xb5\xb9\x41\xc5\xde\x19\x9f\x78\x4b\x76\xbd\x85\x70\xad\xa1\x67\xdb\xe5\x38\xe1\x5c\x24\xd6\xf6\x20\x63\xa5\x3b\x5e\xf4\xe2\x9e\x50\xe9\x6d\xbc\x1f\xb5\x9f\xe7\x31\x4c\x52\xae\xc6\xe3\x96\x32\xcd\x80\x14\xc9\x10\x1f\xdf\x11\xcc\x3a\x40\xa4\x93\x5f\x75\xb1\x4b\x52\x90\x8f\x34\x86\xa8\x2c\x38\x43\x58\x91\x81\xe7\x80\xfe\xe0\xac\xdd\x35\x81\x0e\xc5\x6f\x99\xf3\xcf\xc2\x97\x76\x05\x3a\x69\xd7\xab\x03\x20\xab\x72\x2a\x24\xbf\x6c\x31\x59\xa8\xe9\xef\x91\x93\x52\xa5\xf6\xbd\xcd\xf7\xcc\xf2\x3a\x38\xb5\x51\x87\xbc\x94\xdf\x85\x1d\xcd\xd7\xa3\xc8\x7a\x0c\x54\x8d\xff\x3e\x00\x09\xc2\x4b\x49\x10\x87\x31\xd2\xcf\x43\x72\xb9\x6b\x16\x9e\x
9f\x20\xc0\xd2\x3d\x8a\x03\xe2\x7b\x97\x46\xac\x47\x38\xf2\x48\x47\x8c\xf0\xe8\x97\xd9\x9f\x66\x6a\x9c\x12\x56\xb4\x36\x29\xd8\x2a\x98\x6f\xcf\x84\x51\x62\xce\x06\x44\x9a\xa0\x8a\x4c\x47\x1c\x68\x90\x17\x51\xe4\x60\xb5\x97\x3a\x12\x96\xfd\xc7\xa7\x8e\xd4\x28\xf8\x64\xc7\x22\x7c\x7f\xaf\x43\xd1\x2b\xe8\x8c\xb1\xbf\x1c\xfc\x04\x3b\x36\xf5\x6c\xf1\x42\xa0\x91\xd6\x01\xa0\xb0\xf7\x60\xc1\x76\xbd\x90\x26\x9c\xba\xa5\xe4\x29\x01\x9a\x8c\xf6\x69\x20\xe3\xbf\x21\xec\x4d\xa7\x79\xf4\x25\x3b\xba\x75\x9a\xfc\x12\x16\xc5\x3e\x71\x71\x50\x74\x62\xe1\x84\xc5\x4d\xcb\xc6\x5f\x52\x0c\x98\x83\x5b\xc0\x45\x3c\xa5\x6f\x43\x69\x91\xcd\x42\x5a\xa5\xee\x92\x2f\x80\xef\xa1\x16\x4e\xd0\x1c\x87\xfa\xd0\xf1\x50\x3b\xdf\xbc\xbd\xb5\x09\xfd\x80\xed\x7f\x1d\xc5\x54\xe1\xb2\x44\xf0\x34\xbf\x18\x2f\x4b\xef\x93\x95\x61\xcc\x61\xe6\xa3\x51\x88\x3b\xfe\x75\xf5\xd5\x38\x1f\x04\x28\x8c\xcd\x31\xd5\x9f\x2c\x2b\x57\xe2\x65\xf2\xf8\x88\x9b\x48\x76\xdb\x40\x25\x6f\x01\x79\x77\x41\x8f\xab\xda\x89\x7e\x7a\xc4\xb6\x69\xb6\xae\x5f\xab\x6c\x08\x7d\x2a\x8f\x8b\x28\x76\xf2\x87\xc2\xaf\x0c\xe6\xe7\xeb\x43\x80\xe7\x98\xf9\x29\xf1\xfd\x59\x42\xed\xe6\x06\x8c\x62\x7a\x79\xf5\x51\x51\x00\x85\x61\xfe\xb7\xce\xdd\x32\xa2\x46\xbe\xa4\x01\x8a\xba\x9b\x22\x4c\xbd\xb0\x20\x9c\xd4\x3e\x3b\xb7\x12\xa0\xb2\x78\x72\xed\xda\x81\x82\x55\xfc\xca\xd9\xc0\x2c\xf4\x05\x5b\xeb\x87\xe0\x8e\x06\x89\x2d\xa7\x9e\x97\x46\xfc\xfb\xa9\x96\x5e\xff\xbe\x8a\xc7\x0c\x1d\x20\x38\x05\x89\x97\x53\xdf\xe6\x6a\x3e\xcb\x6b\xd2\x61\x03\x6f\x8d\x96\x30\xec\x90\x6e\xd7\x22\xf0\xf3\x34\xcf\x21\xd7\xc1\x02\x83\x3c\x8b\xfd\x14\xfd\xd5\xc1\xdb\xcc\xff\x04\x87\x3a\x05\xcc\xe5\x25\xdf\xae\x63\xd3\xf1\x96\xc6\x73\x7b\xbc\xf5\x3f\xe7\x0c\x54\x18\xdd\xca\x45\x37\x39\xf3\x1c\xe2\xe4\xba\x67\xd5\x1f\x0e\x8e\xd5\x3a\x76\x36\x13\x89\x48\x7e\xa5\x3d\xa5\xa8\x07\x2a\x79\x98\x67\x65\x37\x80\x13\x6b\xfc\x84\xc2\x34\x3d\xcc\xbf\xad\x30\xb4\xaa\xd4\xc6\xd0\xa0\x08\x82\x12\x1f\x38\x84\x98\x8d\x22\x68\x39\x76\xee\x66\xaf\xa5\x67\x8b\x8b\x27\x93\x8d\xe8\x63\x52\x27\x94\xd7\x
69\x3e\xf2\x2e\x26\x36\x8e\x90\xa2\xe0\x34\x7d\x12\x8c\x3b\x97\xd7\xf8\x3b\x60\xed\x43\x2e\x3f\xb5\x11\xcd\x6c\xd6\x95\xfd\x19\x3c\xaf\x40\x73\x6a\x0e\x71\xb3\x71\x97\x3d\xf8\x93\x17\x81\xa9\x02\x30\x53\x39\xc1\xca\xff\x15\xc7\xe1\xa0\x20\x1f\x58\xdb\xef\xa6\x16\xe5\xb2\xa5\x83\x98\x2a\x13\x15\x12\xde\x35\xa9\xab\xce\xa0\x5c\x54\x43\x0f\xc4\x51\x5c\x38\x50\x40\xa3\x7e\xa0\x3c\x5f\x19\x28\x8b\x06\xd9\x69\x5f\xc5\xb4\x0b\x0c\x91\xd2\xfb\x72\xa7\x98\xfc\xf6\x67\x3e\x15\xbf\x74\xc8\x04\xad\xad\x77\x35\x9e\xc4\x2b\xa3\x71\xa4\xeb\x4f\x21\x55\x5c\x68\x79\x64\x79\x69\x25\x1f\x68\xbb\x83\x4d\x40\x19\xc2\x26\x3c\x18\x20\xf0\xb5\xd6\xb5\xcb\x33\x2d\xb7\x6f\xe2\xed\x7b\xb6\xf3\xcd\xcc\x1d\x82\xcb\x4e\xc3\x32\x80\x7e\x97\x12\x61\x2e\x6e\x61\x9b\x60\x4c\xb0\x79\xc2\xf6\xad\x67\x58\x28\xf0\xa9\x47\xb6\xfc\x7a\x63\xac\x5f\x0b\x57\x85\x47\x8c\x79\xb8\x60\x83\x0f\x72\xa6\x53\x83\x08\x2e\x09\x25\x1d\x7d\x03\xc1\xcc\xc8\x5c\x9e\xd8\x64\xe5\x94\x0e\x8d\x5e\xf2\xed\x0a\x2a\x0c\x01\x98\xbe\x7a\x7e\x99\x5c\x7b\x4a\x0a\x7b\xed\xf3\x53\x5a\x77\x2a\x8c\x99\x03\xa4\xdb\xc9\xdf\xc9\xb3\xfa\x43\x27\x5b\x5e\xe3\x7a\x61\xde\xed\x33\xa7\x66\x0c\x39\x23\x30\xdc\xf6\xa0\x58\x44\x19\x03\xd6\x05\xa4\x83\x0a\x0b\x0e\x9f\x47\xc2\xde\xd6\xc5\x44\xc2\xe8\x9d\x65\xd5\x55\x52\x6f\xf3\x3e\x97\xac\x44\x2a\x3c\x80\xa2\xe1\x4a\xfa\xcb\xc8\x45\x4c\xd7\x02\x91\x04\xdf\x36\x05\x6a\xbe\xe1\x7a\x5c\xbb\xfb\x2f\x03\x7d\x5d\x0d\xca\xea\x33\x43\x49\x74\xc5\x0c\x44\xde\x6c\x81\x7d\x88\xc2\x64\x7a\x6d\x08\xb3\xa3\x28\xb2\xa9\x35\x29\x40\x08\xd7\x12\x7c\x4c\xfa\x76\xa9\x0e\x0f\xa2\x12\x7b\x67\x4d\xba\x20\xc3\x33\x3c\xba\xf7\x1c\x48\xab\x29\x7a\x26\x6a\xa5\x15\x46\xf2\x44\xb2\xec\x2b\xda\x5c\xf9\x2c\xdc\x7e\x89\xcd\xdb\xd1\x79\x9e\x67\x13\xd9\x1b\x5f\x29\xbe\xa6\x99\x66\x43\xea\xc5\xf1\xcc\xe7\x05\x2f\x79\x65\xba\xf5\xb0\xd2\x0b\x40\x75\x66\x1d\xda\xbb\xb5\x08\x2d\x4c\xab\x6c\xfc\x79\xbe\x4b\xd7\xa5\xff\x3c\xa5\x9c\xb4\xd8\x59\xc1\x83\x3a\x57\xf8\xec\x9f\x7a\xd7\x08\xac\x6b\x3d\xb6\xb1\x51\x3b\xd3\xd1\xc3\x9b\xee\xf7\x53\x
89\xf4\x4e\x55\x32\xc3\x71\x1a\x28\x9d\xf4\xf8\xd4\xe5\x0b\xb0\xa8\xf4\xe7\xc1\xb3\xaf\x44\x64\x90\x12\x44\x29\x6e\x9e\x9f\xbf\x0a\x5b\xe2\x38\xb0\x8c\x9c\x89\x45\x54\x48\x1d\xfb\x41\x0d\xe0\x45\xdc\x2f\x6e\x96\x5c\x17\x28\x55\x31\x08\x62\x44\x2a\x1e\x37\x5d\xbd\x55\x0d\x10\x62\x5a\xf3\x05\xf8\xac\x16\x6a\xab\x9c\xc0\x22\xe9\x5c\x46\xe3\x65\x0c\xa5\x3c\x3d\x62\x79\x0b\xc0\xd9\xb4\x63\x77\x72\x9d\xba\x17\x22\x4f\x2f\xd7\x4a\x2c\x5c\x6a\x17\xb7\x53\x4a\x5e\xb4\xf9\xb5\x6a\x57\x4a\x8e\xda\x8a\x7f\x28\x65\x58\x7c\x05\x0d\xbe\x84\xea\xbc\x9b\x03\x6d\x7a\x0d\x4c\xd0\x97\x63\xa8\xb3\xd4\x6f\xec\x50\x2a\xb9\x70\xa0\xb6\xe3\xee\xc1\xc0\x58\xe1\xb7\x77\xf9\xc9\x1b\x0f\x5f\xc2\xe2\xbb\xf2\x08\x97\x44\xd7\xf7\xcf\x20\x15\xab\x32\x28\xaf\x4d\x3d\xaa\x4b\x8e\x16\x95\xb1\x5f\xd4\x8b\xc7\xd9\xe3\x2f\x4b\xe4\x0b\x6e\xe5\x1a\x7d\xd0\xd1\x42\x65\x40\xcb\xad\x27\x18\x15\xe4\x6a\x9b\x1b\x37\xfb\xc4\x9d\xab\x06\x04\xc4\xbd\x3d\x0d\x87\xd1\x9f\x04\xb9\xa7\x91\x53\xa6\xe0\x74\x7a\x83\x3e\xa0\xdd\x62\x85\x02\xbc\xa5\x32\x23\x34\xa4\x57\x13\x63\x69\xc8\x39\xc7\xe0\x53\x99\xb0\x94\x33\xf8\xdb\xa2\x30\x73\x53\x6c\x54\x16\x0f\xba\x2b\xdf\xf0\x99\x8d\x9a\xad\x9a\x06\x17\xc3\x93\xf0\x78\x87\x65\xcc\xed\x01\x3a\x79\x27\x66\x14\x79\xd0\x88\x52\x71\xa0\x9d\xf3\x91\x7b\xf8\x2b\x31\x6d\xb2\xc4\xa8\x42\xc7\x8b\xe4\xd7\x15\x5e\xed\x86\xa3\x57\x21\xe2\xcb\x3c\xd0\x74\x94\x80\xdc\xe7\xf9\x9c\xea\xaf\xd0\x5d\x75\xf0\x40\x72\xc3\x49\xff\x4f\x30\xcf\x76\xda\x7d\x0b\x61\xfb\xb0\x3f\x99\xed\x76\x29\x6b\xa2\xad\xb0\xa9\x39\x40\x44\xb7\xa5\x1b\x16\x47\x81\xa6\xc9\xb0\x3d\x7d\x8b\xdd\x82\xb7\xeb\xa8\xdd\x51\x02\x79\xed\xbc\x88\xdf\x98\x52\x28\xe8\x90\x8d\x08\x9c\x0b\x85\xfb\xcf\xb7\x7a\x27\x81\xad\x07\x6d\x30\xde\xb6\x48\xb4\xb0\x65\x6e\xb9\x18\x27\x80\xb0\x9d\xa5\xa1\x8e\x08\x07\x3c\x1e\x7a\xed\x52\xa5\x06\xe5\xfe\x80\xb6\x02\x83\xbb\x27\x81\x3d\x54\xc5\xd6\xc6\x77\x46\x29\x27\xcc\x72\xc7\x0d\x7c\x1d\xbe\xcf\x8b\x9a\x99\xd9\x5a\xbe\x85\xb5\x9f\x9e\xe3\xe9\xc1\x78\x45\x1b\x3f\x39\xbd\x3f\xa5\x9a\xc8\xdc\xd8\xfc\x
a5\x25\xd0\xc2\xf2\x00\x88\x69\x47\xce\x86\x00\x23\x6a\xb2\x2c\x77\x6c\xfb\x5f\xf1\xd5\x1c\xb9\xd8\x8b\xc1\x45\xab\x78\xc8\x13\x62\x44\xae\x1f\x9e\x6b\x48\xaa\x58\x7c\xd1\xca\x10\xfd\xb9\xc9\x20\x81\xe8\x89\xf3\x95\xad\x4c\xca\xf9\x17\x11\x90\xdb\xc2\x35\x9e\xa2\x1d\x87\xf5\x80\x17\x56\x93\x24\x8d\x11\xd2\x7d\x92\x57\xfe\x03\xd3\x62\xe3\x78\xf2\x63\x0f\x69\xdd\x86\x9f\x62\xa7\x6e\x5d\x35\xcd\x30\x28\x2d\x11\xe7\x91\x7a\xb0\x39\x57\x4a\x4c\x17\x86\x22\x74\x4f\xd8\x5d\x6e\x9e\x72\x7f\xac\x44\x32\x4a\xe2\x3c\x3a\x72\x77\xc2\x10\xf1\x11\x65\x16\x66\xbc\x42\xc9\xce\x47\x1f\x80\x79\xbe\x3b\x29\xa2\x1c\xaa\xf5\xd5\xcc\x1f\x68\xcc\x28\x05\xbb\xd9\xcd\xdc\xaf\x9e\x9f\x85\xa1\xfc\xa8\x66\x53\x11\xce\x81\x63\xc2\xd6\x18\x52\x04\x23\x2c\xdd\x63\xbc\xbe\xc1\x30\x08\xf1\xe9\x9b\xa9\x9b\xa3\xa9\x7c\xe5\xc1\x83\xc6\x6f\xa2\x79\x01\x48\x70\xc8\x97\x05\x85\x39\x24\x9e\x22\xa1\x6e\xc0\x0d\x20\x91\xb6\x10\x88\xad\x9e\x63\x9a\xdd\x7d\x50\xce\xfc\xbc\x14\xd6\x54\xc5\x4c\xf2\xf6\x53\x72\x1b\xd6\xbb\x2e\x42\x9d\x32\x1a\xac\x1b\xe2\xb2\x18\x5e\xa0\x1c\xe2\xa0\xfa\x88\x66\xfd\x6d\x57\x73\x2d\x77\x82\xa3\xb6\x14\x7d\x7c\xb7\x7b\x40\x0f\x83\x9d\xed\xb6\x01\x36\x6d\x28\x8b\x68\x17\xed\xd8\x8a\xfa\xc0\x7c\x47\xaf\xaa\x21\xfa\xc1\x6f\x48\x66\x0e\x22\x2c\x84\x78\x79\xb3\xc4\x24\x59\x7b\xbf\x77\x77\xd4\x67\x14\x01\x75\x01\x19\x8e\x3e\xf6\x00\x63\x4e\x8a\x34\x07\xec\x53\x61\x32\x38\x73\xca\x6d\x54\xd6\x3f\x52\x49\x97\x4c\x6a\x76\x3b\xbc\x90\x2b\x22\x8f\x06\x1e\xbc\xb4\x26\xb0\x28\x53\xd5\xf0\x0c\xb6\xdc\xee\x96\x5a\x16\xe4\xcd\xed\x2b\x53\x68\x55\x54\x6d\x43\xbc\x0b\xf4\xda\xa4\xa6\xe7\xc8\xca\xd8\x29\x9e\x45\x8e\x54\xae\xa1\x22\xe5\x55\xd2\x0d\x20\x30\x70\x2a\x97\x0e\x5a\xdc\x25\xd6\x83\x28\x70\x51\x8e\xfb\x95\x8f\x8c\x62\xe7\x05\x3d\xf3\x74\x1f\xb1\xf5\x3e\x06\x9a\x1e\x9b\x6b\x82\xd4\x1b\x3b\xa8\xd3\x49\x9a\x2c\x8e\xd0\x7d\xb5\xa2\x5d\x35\x1b\xcb\x18\x79\x62\x80\xfb\x5e\x40\x6c\x0f\xb8\x5b\x85\x1e\xe5\xba\xd6\xf3\xd8\x06\xeb\x9d\x26\x6b\x28\x74\xf0\x77\x5b\x3e\xe3\xfa\x13\x2a\xe1\x9e\x86\x38\xbc\x
fc\x74\x18\xa3\xaa\x47\x8b\x1d\xbe\x7f\x28\xfd\xb2\x74\x60\x97\xad\x5c\x36\xb0\x65\x29\x23\xa3\x1f\x30\xc2\xec\x1a\xb3\xd4\x09\x06\x30\x8e\xb5\x14\xa9\xa9\xe0\x54\xc8\xa0\xfe\x73\x8d\x19\xc7\x25\xa3\xa7\x2d\xb9\xb4\x55\x4f\xa4\x65\x22\x8b\x43\x8c\x49\x3e\x7a\x3b\x31\x5a\xea\x3c\xa5\xaa\x94\x06\xd7\x17\x55\x68\xf1\x08\xc2\xcb\x5c\xf8\x27\x29\xbe\x42\x10\x4a\x94\xa6\xfa\x5a\x27\x94\x18\xf1\xeb\x57\xa9\x0d\x5a\xd4\x76\x76\xa9\x25\x2d\x1c\xb8\xb6\xc5\x77\x24\x75\xe0\x35\x27\x17\xcb\xa5\x67\xcf\xaf\x69\x2a\x16\xc6\x64\x52\x88\x53\x76\x20\x3b\x09\xd5\x78\xe2\xd3\x67\x74\xe1\xae\x60\xe5\x20\x03\x2c\x74\x79\xc2\x27\x40\x39\x95\x51\x1e\xaf\x6c\x61\x2d\xe3\x64\xd5\x90\x99\x2a\x98\xea\xbf\xa5\x0f\x23\xb9\x35\x49\xb7\x03\xa7\x13\xef\x1d\x9d\x18\x0f\x52\xdd\x33\x86\x23\x99\x21\x08\x92\x6f\xe6\xde\x23\x47\x7e\x0b\xf5\x5d\x13\xda\xa5\xff\x5d\x0e\x31\xe7\xfc\xff\x3a\xbb\x35\x9e\x8d\x62\x4c\x8d\x07\x47\x53\xcc\xd7\xfb\x7d\xe0\xa6\xa6\x01\xdf\xb4\x34\x80\x35\x25\x35\x0a\xb1\x97\x5e\xb6\xfe\x52\xfe\x20\xb8\xb6\x43\x93\x14\x83\x14\x79\x5d\xd7\x64\x50\xf7\x6a\xdc\x84\x27\x21\x77\x34\x7a\xfa\xdc\xac\x12\x28\x38\x76\x84\xa9\xa0\x3e\x34\x82\x8e\x81\x65\x48\x70\x7a\x38\xfb\x17\x00\xa0\xee\x42\xd2\x21\xca\x20\x1b\xaf\x7a\x2a\xc7\x6b\x49\x27\xb6\xc7\x47\xf0\x02\x11\x42\x25\x8b\x07\x94\x24\x53\x28\xab\x23\x7c\x30\xca\xac\xcd\xf3\x96\x66\x5b\x30\xad\x92\xa1\x53\xbd\x87\x99\x27\x11\x9e\x3d\x6d\x5b\x99\x43\x2b\xb4\x67\x92\x50\x32\xbc\xe5\x95\x41\x72\x52\xdf\x43\x84\xc6\xde\xa1\x1d\x4c\x84\x42\x9e\xfd\x19\xa7\x50\x02\x73\xf2\x2e\x69\xb9\xb0\xec\xfd\x7d\x5c\xa8\x78\xfd\x9e\xbc\x50\x08\xc1\xb6\xad\x63\x64\x77\x64\xf2\xdb\x51\x06\xc1\x1f\x9d\xed\xd3\xb5\x24\x40\x85\xbb\xed\xe4\x48\x3d\x48\xb7\x6b\x10\x63\x35\xb0\x46\xcb\xaa\x55\xca\xdb\xef\xc5\x3a\x89\x45\x23\x74\xdc\xfc\xe1\x2c\xbc\x1e\x86\x58\xed\x8d\x2a\xb8\x16\x12\x4f\x5e\x18\x0e\xb1\xf4\xe7\x60\xd0\xff\x3d\x41\xda\xe9\xe3\xd0\x60\x70\xf8\xe2\xb2\x0d\x48\x88\x26\x2f\x55\x02\xc2\xd0\xca\x08\xf8\x54\x21\xb6\xc3\xc6\x11\xb7\xd5\xa0\x2a\x31\xd2\xd7\x
3e\xce\xda\x13\x67\x3f\x47\xcd\xcd\xf5\xb0\x46\x10\x27\x52\xd7\x69\xb7\xe1\x05\x98\x2c\xfa\x91\x7d\x46\xf4\x21\x1e\x5f\x45\x8e\x64\x3b\x11\x57\xae\xcf\x45\x37\xe1\x87\x86\xc2\x9d\xa7\x8e\x63\xe5\x68\xe3\xfb\x1c\xdb\x14\x89\x79\x99\xa7\xfc\x16\xf2\x7d\xec\x60\x97\xd5\x83\xad\x70\xac\xd9\x39\x7e\x31\xb3\x11\xfd\xd7\x63\x96\xeb\x9c\x02\xb4\x54\xff\x34\xe6\x02\x6d\xbd\x7e\xb3\x10\x21\xb8\xda\x8f\x62\x86\xdb\x0b\xf7\x1d\x63\xda\x66\x3b\x07\x2b\xc6\x1a\xc8\x44\x2d\x4f\x4d\xee\x67\x73\x48\x50\xc3\xd5\x8c\x1c\xe6\x2f\x4b\x79\xc3\xb1\x06\x1c\x3e\x38\x06\x98\xa0\xcf\x12\x93\xf3\xa2\xf0\xc0\xd6\x99\x2e\xee\x93\xa9\x44\xc2\x1a\x46\x4d\x18\x66\x1f\x0e\xf1\x68\x2c\x9c\xb2\x5e\xe0\x7b\x15\x59\x27\xfd\x98\x3a\xe7\xe0\x73\xf7\xde\x49\x56\x2b\x7a\xb0\xfe\x08\x28\x76\x30\xc6\x83\x3c\xa3\x11\x12\x99\x6d\xca\x67\x64\x29\x83\x74\x5f\x66\xe1\x93\x49\x01\xff\x3f\x10\xd2\x37\xa0\x53\xfe\xeb\x63\x80\x98\x21\x74\x42\x2e\xb8\x5a\xba\x3f\xa0\xa2\xd5\x12\x03\x34\x13\x3e\x72\x7f\xc9\x39\x87\xeb\xea\x28\xf6\x7f\xe3\xc0\xac\x50\x8d\x0e\x45\xb3\x1e\xfb\xaf\x15\x00\xf4\x47\x32\x4e\x90\xbe\xd1\x95\xf1\x9b\x79\x9f\xab\x9b\x29\x65\x64\x2d\xe2\xe7\xdf\x13\x9a\x37\xf6\xee\xc0\x51\x02\x64\x54\x81\xc0\x9d\x0c\x49\xee\x36\x3e\x0b\x93\x38\x0e\x28\xae\x83\x19\xc7\xd1\x78\x81\x08\xf1\xf0\xb0\x02\xa1\xc8\x55\x4d\x17\x7a\xf7\xf2\x39\xb0\xca\xf4\x7b\xc6\x5b\xf5\xa0\x00\x9d\x8d\x18\x1a\x5d\x76\xc6\xcb\xa7\x36\x8e\x51\x0d\x17\x71\x02\x9b\x71\xc2\x64\x2d\x7b\xe7\x76\xe6\x29\xc7\x81\x24\xbf\x40\x68\x18\x1d\x8c\x3a\x5c\x57\xd2\xb9\xc1\x63\x87\x8a\x65\x47\x0f\x76\x32\xbb\xe1\x5b\xa7\xd9\x7e\xac\x14\x6d\xac\x64\xc6\xa6\x0c\x21\x4e\xa4\xbb\xf8\x5b\x4c\xd7\x1d\x73\x0e\xe1\x2b\x8a\xfd\xf4\x79\x0f\xe0\x8b\x06\x62\x4b\x22\x80\x43\xb1\x29\x14\x95\x45\x49\xf5\xb2\x83\x20\xc4\x5d\xdb\xc3\x22\x56\x80\x6c\x61\x56\xd8\xc8\x9e\x8b\xa9\x60\xc7\xf0\x18\x07\x3b\xd7\xde\x18\xb4\x20\x9f\x96\x42\x91\xd6\x4e\xf9\x88\xc8\x3f\x2b\x7b\x34\xc3\xe2\x04\xc3\x19\x9d\x39\xaf\x2d\x09\x21\x1f\x81\xb9\x43\x90\x57\x37\x79\xbe\x27\xd6\x4b\x20\x0e\x67\x
c2\x39\xb4\x02\x9a\x5a\xd2\x7e\x7a\xe0\x50\xcb\x19\x61\x02\x5f\xa8\xe4\xe8\xde\xc5\x32\x78\x57\x24\x95\xdd\xd4\x92\x2d\x0e\x93\xc8\x05\xa9\x1c\x09\x46\x15\x13\x12\xcc\x64\x80\x3c\xbb\x9c\x7b\x86\x57\x7d\x90\x33\xd7\x35\x49\x2d\x54\x73\x05\xe4\x1c\x04\x14\x07\x83\x12\xa9\xd3\x60\x6e\xdf\x7f\xa2\xad\x05\x76\xe3\xfa\x2a\x3a\xb0\xec\x1d\xff\x0a\x74\xd2\x38\xf6\x86\xbf\xf5\x8f\x19\x13\x15\x77\xff\x9e\x73\xf0\x75\x4e\x36\x7e\xfb\x15\x3a\x92\x45\x8a\xb5\x98\x87\x73\xef\x2f\x3e\xd9\xf9\x0c\x46\xc4\xf3\x98\x45\x9b\xe4\x2a\x6a\x96\xd0\x31\xad\xe1\x23\xd0\x78\xd2\xf2\x3e\x6e\x58\x6b\x40\x09\x61\x00\xee\x61\x1a\x98\x7e\xce\x53\xc2\x19\x33\xef\x99\xe3\x60\x1f\x71\x4b\x80\x06\x6e\xc2\xc9\xa3\xf7\x2c\xbc\x16\x6b\x26\x9f\x83\x7c\x3c\x8b\xb9\x85\x97\xdf\xd0\x23\x5a\xda\x87\xe6\xdd\x12\x6d\x5d\xcb\x0b\x96\xb8\xc5\x89\x3b\xc1\xee\x63\x7e\x19\x36\xed\xe7\x70\x1e\x74\x55\x34\xc0\xec\x56\x0e\xea\xbb\xea\x24\xd5\x52\xfa\x07\x40\x61\xfd\x02\x43\x62\xa2\xa6\x86\xc8\x80\x83\xeb\x4e\xb4\x17\xb6\x1d\x19\xa8\x32\xf8\xa7\xfd\xb7\xfe\xe0\x32\x00\x3b\x56\xb2\x5f\xe7\x20\xdc\xcd\xc5\x66\x0c\x43\x44\xc9\xe6\x8d\x32\xa4\x87\x7d\x5c\xb8\x73\x37\x2d\x15\xab\x6c\xcf\x72\xae\xbf\x41\x2c\x62\x3d\x04\xd7\xe3\x32\xf2\xe0\xf1\x6a\xcf\x27\x99\xe8\x23\xce\x71\xb2\xae\x9c\x4e\x95\x38\x4b\x75\xfb\xa8\xb4\x66\xf7\x20\x08\x40\xb2\x2a\x2e\xc0\xfd\x57\xae\x40\xcc\x1a\x92\xe2\x60\x5e\xb4\x4b\x01\xae\xcc\x50\xc0\x54\xf5\x4c\xcc\x6d\x34\x60\x92\x52\x32\xac\x2d\x4a\x74\xf8\xba\x82\x20\xd6\xf4\xaf\xd0\x4c\x3b\x33\xd7\x29\x0a\xfc\x2d\x4e\xc3\x97\xd6\xba\x29\x57\xb9\xe4\x77\x27\x39\x8d\xd0\x3f\x11\x33\xea\xe6\x7e\x48\xb2\xd0\x46\xa6\x5e\xb2\x88\x73\x3a\xf8\x6c\x88\xce\xe3\x34\xb3\x48\xa3\xd5\x3f\xf4\xae\xa3\xeb\x76\xf6\x31\x00\xb9\x9e\xb3\xc6\x07\xfe\xbe\x19\x60\xff\xc2\x92\x64\x32\xd3\x44\x21\xd1\xd0\x14\x54\xf4\x94\x9f\xf3\x87\x8a\xf1\xb1\x89\xef\x1d\x44\x2c\xcf\x22\xec\x19\x8f\xd5\x2e\x99\x42\x68\x2b\x80\x7a\x06\x57\x6c\x61\xdf\xa8\x26\x3f\x85\x51\xee\xf1\x51\x36\xb0\x14\xe2\xaa\x18\x74\x8c\xe1\x19\xf9\xfb\xcb\xf5\x42\x
12\xd9\x0b\xdf\x4e\x21\x92\xf5\x2c\xe7\x23\x37\x81\x23\xc9\x50\xa4\x6f\x77\xce\x8b\x9c\xe5\x3e\x57\x67\x63\xf7\xcd\xd1\x67\x73\x39\x53\x39\x56\x4b\xb2\x0c\xa9\xc1\x2c\xfd\xae\x7f\x9d\xfb\x1f\xee\x34\xb4\x05\xe4\x9d\x5f\x60\xfc\x3b\x2c\xd6\xea\xef\x97\x2f\xc7\x3e\x18\x70\xc0\x6e\x04\xc8\x85\x52\x61\x85\x1d\x6b\xf5\xe9\xff\x40\xef\x30\x14\xa2\x6b\x60\xa1\x43\x16\xb3\x4a\xa5\x02\xcb\x09\x41\x3b\xd5\xa1\x2a\xb3\x3e\x08\x2a\x7c\x0f\x45\xdc\x4f\xc2\xcc\x74\xf5\xb5\xae\x23\xb9\xad\x11\x47\xc9\xbe\x50\x94\x70\xb7\x46\x95\x43\x3e\x74\xca\x89\x96\x79\x2a\x9c\x75\xf2\x4c\xd7\xc8\xff\xff\x3e\xbc\x39\x07\x89\x27\xa5\x49\x20\x33\x7f\x88\xe2\xf2\xba\xa2\x2f\xe5\x12\x17\x3c\x76\xea\x4f\x3c\xfe\xe0\xdc\x71\xd4\xe4\xb2\x8e\x77\xd0\x79\x65\x24\x0f\x15\x98\x7b\x64\x67\xf8\x9b\x80\xc6\xeb\x4f\xeb\x43\x09\x3d\x6e\x40\x6f\xcf\x96\x36\xee\xe6\x6e\x47\xe7\xac\x35\xa5\xda\xd1\xa3\x56\xdc\xd1\x8c\xe1\x38\x15\x08\x77\x0f\xcb\x78\x26\x73\x69\x90\x27\xf1\x71\xbc\x80\xf5\x03\xa8\x92\xc7\x68\x27\x84\xe4\xdb\xcf\xc1\x9f\xf1\x16\x26\xe9\x23\x2b\x5e\x26\x1d\xe1\xa6\xb3\x86\xf5\x24\x88\x8c\x48\xd9\x3b\xd3\xb8\x5b\x0c\xfe\x70\xa7\xfc\x51\x1a\x41\x01\x9d\x01\x1b\x40\xc4\xdf\x43\x9a\xd4\x07\xcf\xee\x7f\x94\x55\x42\xac\x4c\xac\x6c\xfe\x1c\x94\x56\xd4\xba\x08\x53\x87\x0a\xe6\xa3\x96\x15\x3c\x06\xcb\x1c\x64\x8e\x59\x54\x7f\x1e\xda\x5e\xab\x65\x06\x08\x49\xd8\xb0\x15\x57\x49\xd5\x00\x37\x49\xfe\xe8\x4c\xe3\xd8\xb2\x6c\x30\x42\xa3\x3b\x0c\xb6\x15\xb0\xb6\x34\xe2\x22\x3d\x8e\x71\x4f\xd6\x18\xd4\x55\xab\x62\x01\x31\xa1\x91\x57\xb5\x16\x23\xd0\x22\x5b\x4f\x0c\x31\x08\x3d\x29\xa5\xa9\x05\x80\xc7\xce\xac\x63\xdd\xef\x9b\x59\xac\xab\x26\x47\xd9\xe2\x29\x23\xd2\x11\xee\x6f\x1a\x14\x7d\x18\x6f\xa0\x9e\x4b\xec\x3f\xbc\x53\x2e\x48\xd5\x6e\xf2\xba\x9e\x93\xa4\x54\x71\x8c\xb1\xff\x3c\x92\x92\xbc\x2a\xd3\x69\xa5\xe2\x44\x16\x48\xe3\xa9\xf1\x34\x8b\x06\xf3\x5a\xf1\xdd\xa6\x69\x4a\x8a\xa4\xc7\xa6\xa0\x26\xa3\x0b\x1e\x0e\x7b\x8d\xed\x3b\x2a\xb4\xa5\x86\x3a\x0e\x58\x2e\x59\x54\x52\x5b\x56\xb2\xfa\x24\xec\x0d\xbe\x0c\xc5\x3a\x
95\x8a\xb8\x3f\xc1\xcf\xfb\x38\x84\x89\xaf\xdb\xeb\x4b\xd1\xb4\xde\xbb\x8d\xcd\x71\x2b\x4c\x6a\xff\xf2\xc9\x45\x77\x78\x91\x5e\xbb\xab\xa6\xa3\x9c\xc0\x08\xd9\x23\x9d\x10\x39\x7a\xf6\xeb\xf3\x59\xdb\xf0\xa8\x76\x54\xdd\x0f\x6b\x6b\xa5\xf6\x7a\xa2\xd6\x5a\x18\x13\xf3\x68\x8e\x4b\xf6\x87\x67\x2f\xa8\xcc\x02\x4f\xd6\x32\x77\x70\x1b\x80\xe6\x05\xb4\x71\x6d\x99\xa0\xdd\xc4\x99\x7f\x84\x0a\x1e\x57\x77\x62\x75\x65\x13\xea\x2f\x1e\xb7\x29\xc5\x87\xac\xdb\x18\x8b\x6e\xf1\x82\xea\xa6\xdd\x26\x75\xc4\x5e\xcd\x5c\x3b\xf6\x03\x95\x73\x4f\x40\xe8\xba\x4b\x7c\xc3\x1e\x41\x82\x1e\x8a\x78\x42\xea\x6a\xae\xa1\x31\xac\xe4\xbc\xdd\x0c\x43\x63\x2f\xe2\x73\xf9\x5b\xcb\xac\x77\xb6\xb2\xef\xae\x47\x5e\x36\xe0\x0f\x0d\xda\x4e\x34\x12\xaf\xe3\xff\xdb\x51\x4c\xfd\x4e\xbb\x0f\x66\x2e\xf8\x35\xa0\x0f\xe9\xd4\xf7\x45\x50\xe1\x0d\x8d\xc5\xc3\x43\xcf\x29\x43\x11\x75\x1f\xe4\x79\x6d\x7b\x4d\x7d\x1c\x11\x55\xcc\x7b\x14\x8a\xe4\x27\x66\x07\x89\x9e\x46\xe7\x70\x00\x42\x3f\xe6\xe0\x35\x13\x1b\x10\x57\xdf\xe8\xf7\x13\x51\xa8\x59\x55\xe3\x5b\x97\x44\x94\xdb\x04\x45\x52\xa9\x2d\x65\xb7\x38\x35\x5d\x74\xe3\x24\x1e\xd1\x72\x7d\x93\x2d\xd2\x91\x56\x6f\x88\x3e\xa5\xf0\x56\x51\xb3\x14\x7a\x1e\xf0\x66\xe4\x15\xff\xaf\xc0\x73\x7f\xcb\xd4\xb0\x79\x89\x16\x01\xe9\xf2\xc5\xd3\xd5\x98\x14\xf5\xcd\xf8\x49\x12\x03\x65\xf1\x0f\xf5\x3b\xaa\x34\x5d\x61\xe3\x96\x6b\xfb\xa2\x79\x41\x12\xab\x3e\x1b\x56\x88\x4d\x38\xbc\x61\x05\xda\xdb\xdb\x82\x2e\x8d\x2a\xa3\xf0\x45\x33\x8b\xf9\x05\xc8\x39\x9d\x4d\x23\xf7\xc6\x6f\xf0\x89\x16\x5d\xc0\xac\x83\x71\x1e\x5d\x76\x37\xa3\xd7\xca\xe4\x66\x89\xc2\x35\x34\xda\x75\x68\xe9\x53\x9e\x0b\xe2\x9f\xfe\xf8\x01\xae\xbd\xc2\x0f\x2c\x55\xb7\xd5\xa7\xa6\xd6\x12\x93\x38\x95\xf8\x48\x39\x78\xa4\x29\xa8\x31\x88\xdc\xaa\x30\x69\xa3\xb2\x31\x9d\x84\x69\x19\x57\xbf\x7e\xa0\xc6\xf0\xa6\xba\x2e\x5b\x68\x22\x3b\x6b\x97\xcc\xe6\xf7\xba\x7c\x55\x24\x89\xfe\xcb\x54\x05\xe5\x17\xf3\x14\x6f\x40\x90\x16\x39\x2d\x2d\x5e\xf2\xf4\x8f\xe5\xfd\x5a\xc9\x87\x3c\xfc\x5f\x79\xf7\xf0\x1d\x74\x6d\xa6\x11\x94\x0b\xe8\x1b\x
e2\x63\x4c\x89\xbb\x47\xff\xb9\x48\x70\xe0\xbe\x69\x03\x10\x00\x3b\x29\xd2\x57\xe4\xdd\x8c\x28\x1f\x3e\xcf\x0e\xbc\xdd\xb2\xf6\x33\xa3\x57\x98\xe0\x65\x1b\xab\x58\xc6\x75\x3b\xcb\x07\x38\x93\xd0\x8d\x5e\x5b\x2b\xfa\x07\xc5\x69\x1f\xe4\x24\xea\xc2\x0f\x56\x94\x2e\x69\xc4\x38\x93\xc5\x62\x47\x8a\x10\x53\xbe\x9c\x33\x39\x16\xb2\xdf\x4c\xe1\xca\x5f\xe4\xb1\x71\x3b\x0a\xea\x34\xc7\x23\xf4\x90\x54\x20\x80\x6f\x03\x12\xaf\x5a\x4c\xa8\x34\x9e\xfd\x9a\x79\x03\x74\xac\x76\x52\x4c\x25\x2e\x9c\xdf\x1f\xec\x96\xd9\x31\x7f\xc3\x55\x17\x39\xdb\xf9\xc3\x40\x96\xd0\x1c\x17\xe6\x5c\x54\xda\xda\xc7\x66\xd0\x8b\x77\xd9\xba\x1b\x43\x5e\x11\xc3\xad\xb2\x0e\x90\xa9\xe3\x06\x91\x05\x8d\xd4\x8e\x5c\xb2\x88\x79\x12\x6e\x6e\x5a\x1d\xa3\xac\x92\x0a\x1c\xb9\x4e\xa3\x4e\x1f\x3b\xca\xc4\xf8\x70\x3f\x1f\x7a\x8f\x8e\xce\xce\xcd\xab\xac\xa0\xed\xb7\xa3\x3a\x14\x9b\x1f\x64\x7f\xc4\xb8\x97\x4f\xd1\x69\xd5\x2a\x09\xcf\x65\xab\x45\xf9\x71\x60\xa9\x21\x0c\xc4\xf5\x09\x3a\x40\x9c\x9d\x16\x3a\xa9\xd0\x5a\xc8\x1b\x9e\x6c\x87\x0d\xab\xa6\x9c\xb7\xfd\x52\x05\x22\x18\xbe\xf9\x69\x84\x5e\x18\x9b\x0b\x68\x86\x06\x01\xf5\xe9\xdf\xe7\x47\x1f\x54\x89\x43\x11\x54\xdb\xf5\x50\xaa\xa9\x43\x50\xec\x43\x91\x01\xf3\x33\xad\x71\xd2\xe9\x7a\xf3\xfd\x07\xf8\xc4\xd0\xfc\x99\x90\xa5\xc8\x69\xe0\x83\xcc\xd0\xf9\xc1\x4c\xd3\x43\xc7\xb3\x36\xbf\x96\x9e\x3e\x58\x46\x27\x5b\x21\xd4\xd0\x50\x0c\xab\x4b\xb2\x25\x4f\xd4\xa0\xf9\x24\xa9\x89\x8e\x99\xa1\x0c\x25\x84\x0d\x82\x2f\x8b\x7f\x56\x60\xef\x2e\x76\x93\x87\xce\x03\xb0\x85\xc5\xad\x4c\xd9\xbd\x36\x45\x66\xd9\x35\x1c\xef\xd2\xba\x26\xb1\x55\x6a\x2f\xb5\xed\xc9\xc1\xcc\x99\xdd\xc4\x5c\xbf\x36\xc6\x10\x1e\x4c\xc9\x04\xf3\x59\x1d\xb8\x63\x32\xf0\x7c\x4d\x50\xeb\x6c\x1f\x90\x6b\xe4\xd6\xfb\x2c\x49\xcb\x72\x28\xea\xf1\xa0\x13\xa4\x33\xe0\x3e\xd0\x8a\x01\x11\x0f\x86\x1a\xbe\x13\x17\xb2\x38\xf1\xf2\xdf\x84\x12\x2d\x2d\xe5\x0d\x41\x9a\x37\x23\x89\x9e\xb5\x64\xc7\x88\xdf\x58\x7c\xfe\x7c\x95\xc5\xf8\x67\xde\xa2\xc0\x68\xa7\x0d\x1b\x73\xef\x61\x79\x10\xab\x77\x1a\xba\x8d\x76\x6b\xdd\x1e\x95\x
e4\xbc\x75\x9e\xd7\xad\x48\x7d\xd5\x8e\x4c\x20\x22\x7b\x13\x6b\xb5\xdc\x7f\xbe\xfa\xb1\x8d\x8e\x90\x86\xc2\x27\x20\x09\x8f\x45\x79\xbe\xe9\x47\x53\xf7\x58\x68\x2e\xeb\x9c\xfa\x5b\x4d\x23\xee\xdb\x98\x48\xf8\x3e\xdd\x51\xcf\x99\x82\xe8\x8d\xda\x68\x5d\xde\x94\x2e\x6c\x84\xcb\xfb\x84\x4e\x23\x4b\x12\x45\xf5\xed\x52\x88\x31\x4b\xdd\x71\xba\x97\xd9\xc8\x2f\x49\x9d\x7a\x7d\x1b\xdd\x17\xd9\x91\x55\x50\xb3\x98\x38\x96\x23\x82\x82\xb2\x4f\x82\xa1\xa1\x3f\x0e\x17\x20\x5a\x82\x99\xd2\xf0\x2e\x48\xd3\xb7\xe7\x00\x9e\x4b\x83\xd4\xad\x1d\xa6\x85\xaf\xb8\xc2\xfc\xaf\x19\x6b\x4f\xec\x06\xde\x5c\x91\x24\xac\x6e\xbe\x2b\x7e\x01\xca\x3a\xb9\x5b\x32\x4b\x31\x2c\xfe\xd0\x64\x9a\x65\xe6\x81\xf6\x0d\x1b\x74\x31\x5c\x6c\xef\xa4\xb8\x06\x89\x6f\x15\x9a\xe3\xc2\x14\x22\xb2\x40\x09\x1c\x0f\xde\x20\x9a\x7d\xae\xd3\x99\x3e\xb2\x83\xd5\xb7\xfe\xa1\x40\xf2\x1a\xc8\x09\x1a\x90\xe6\x80\x8f\x90\xd7\x8e\x4d\x20\xb5\xc5\xe4\xd2\x26\x8d\x69\x8c\xf8\x53\x9e\xb4\x0f\x70\xc5\x87\x34\xc6\x72\x0b\xbb\x39\x39\x26\x01\x8f\xad\x51\xaa\xa0\x7c\x00\x1c\x94\x47\xc4\x7b\x3f\x17\x1e\x9e\xf4\x45\xfc\xc9\x2d\x6a\x35\x53\x33\x4b\x8b\x0b\x62\xa0\x63\xec\xc2\xca\x1f\xc2\x78\x42\x1b\x29\x1e\x38\x9c\x10\x25\xb8\xfd\xea\x90\xac\xf8\x67\x6a\xdf\x56\x48\x1b\xd5\x38\xe6\xa9\xdc\xbc\x10\x47\x3e\xa8\x41\xc3\xb1\x8f\xdf\x83\x0c\x00\xdf\x60\xa8\x5f\xf8\x27\x3a\xf1\x89\x11\xa2\x12\x5c\x30\x33\xed\x67\x54\x06\x10\x3b\x69\xfb\x36\x76\xbd\x36\xd3\x0b\xcb\x1f\x64\x10\x15\x03\x66\xd0\x00\x8c\x34\xba\x82\xf4\xc6\xf4\xff\x08\x33\xc5\xe0\x48\x6c\xa8\xe0\x64\xe7\x64\xf7\xc4\xbc\x55\x1b\xfc\x79\x40\xaf\x75\x4b\xb2\xcf\xef\x1f\xc0\x0f\xcf\x79\x02\xd6\xf3\xb8\xbf\xa5\x25\xe8\xd1\xa3\x53\xc0\x32\xd7\x19\xc3\x33\x13\x79\xce\x57\x65\xd1\x76\xd5\x89\x9c\x69\x1c\xc9\xff\x31\x79\x3f\x59\xc8\xd2\x54\xc3\x8f\xf9\x85\xab\x77\x5f\x39\xa6\xe6\xa7\x44\x76\x5b\xb0\xd1\x67\xa0\x4c\x74\xee\xe6\xda\xc7\x60\x5b\x71\x5b\xe5\x1a\x4b\x98\x95\xc0\x4a\x36\x97\xcb\xec\x47\x0a\xfe\x60\x90\x78\xaa\xc3\x2f\x96\xd2\xa3\x15\xa5\xb3\x8d\x55\xbd\x33\x86\xfe\x33\xe0\xef\x
3f\x39\xbf\xae\x99\x36\x8e\xbb\x27\x2d\xed\xc5\x0a\xc4\x64\x1d\xc7\xae\xe6\x27\xd5\xf5\x55\xd7\x32\xd4\x75\xde\x89\x56\xc9\x9d\x37\x80\x64\x3a\xe6\x11\x44\x35\xb3\xe9\x50\x30\x5e\x15\xb3\x59\x9d\xa7\x0a\x25\x94\x17\x0e\x7b\xa6\x1f\xf1\x90\xeb\xa9\x94\x52\x94\x9a\x64\x2a\x09\x56\xc0\xd9\xa2\xdf\xa2\xd9\xb2\xc7\xe3\x35\xa1\xd9\xfb\xc2\x74\x95\x23\x06\x5f\x2c\xca\x1d\x6a\xa6\x80\xbd\xf1\xdb\x8b\x9c\x06\x7c\x8d\x70\x57\x6f\x4f\xcd\x6a\xf3\x55\xb5\xcb\x31\x69\x55\xbc\xf6\x47\x36\x2e\xc2\x9f\x17\xef\xf4\xa6\xe8\x5b\xa4\x0c\x50\x51\x5d\x79\xed\xaa\x25\x15\x6c\xb1\x66\xc0\x3f\xb8\x7a\xd4\x52\x22\x19\x9a\x36\x46\xf0\xbd\x21\x06\x9b\x88\xc6\xb5\xaa\xc2\x4a\xfe\x26\xfa\x94\xb9\x41\xc3\xfb\xb0\xda\x06\x6b\xb0\x61\x56\x25\x9a\xdf\x59\x50\xbc\x05\x22\xef\xbc\x5f\xb0\x90\x48\xef\xbe\xae\x48\x96\x5b\x9e\x86\x73\x54\xb9\x04\x50\x78\xa5\xca\x69\xdc\x0a\xfe\xd0\x17\xe1\xce\xe7\xf6\xb9\xef\x0b\x42\xb8\xf6\x11\x6e\x34\x60\x1f\x0e\x28\x6f\x94\xaa\x84\x4b\x6d\x67\x28\x84\x9f\x73\xb4\xcd\x0d\xb1\x94\x1a\xbe\x2f\xa8\xf8\x74\x32\x3c\x5c\xee\xe1\xf6\x91\x4a\x88\xc2\x23\xbf\x55\xf7\x92\xb0\x69\x3d\x33\x13\xf6\x42\xe0\x7e\x1c\x6d\x99\xe8\x67\x5a\x83\x27\x6f\xd3\xab\xcc\x5a\xea\x0c\x38\x15\x17\x14\x72\xe5\xad\x1d\x06\xcc\x31\x64\x64\xe7\xfd\x19\xa7\xe3\x5b\x8b\xe0\xdb\x2e\xe8\x8b\xaf\xbf\x0a\x6d\x95\x76\xb9\xba\x8e\x8d\xc8\x7e\x37\x10\xac\x3b\x3e\xd5\x51\x9a\x43\x25\x86\x29\x6f\x62\x47\xab\x76\xac\x7b\x1b\x41\x8d\x5b\xaa\x6c\x66\xab\xd6\x88\xff\x08\xc4\x20\x2e\xbd\x21\x6a\x02\xde\x2b\x1c\x18\xa7\x29\x88\x3c\x7b\x77\xd2\x83\x19\x39\x55\x1b\x54\xa5\xcb\x16\x88\xd8\xb4\x46\x64\x13\xe8\xd9\xf7\x40\xe4\x93\xee\x75\x14\x5b\x09\x65\x6b\x7c\x55\xcc\x80\xe5\x94\x46\x91\x6d\x17\x2c\xd6\x75\x84\xe9\xee\x4c\x26\xb4\xc7\x3c\xd8\x38\xdb\x78\x30\x10\x37\x36\x55\xd9\x6a\x37\xea\x31\xe2\x08\xa7\x88\xc4\xd7\x58\x6f\xaf\xfc\x9a\x88\x24\xf0\x4e\x8e\x53\x61\x9f\xda\xb2\x0f\x4e\x2d\xd2\x88\x1c\x58\xa6\xbe\x69\x96\x30\x03\xf7\xa8\xa6\x4e\x1d\xe1\xa3\x44\xa3\x26\x18\x0d\xdb\x05\x58\xdc\xd9\x28\xc9\xa7\xe4\xb0\x2a\xec\x
7a\x33\xd8\xd3\x8f\x31\x87\xa0\xda\xc0\xef\xaa\x6b\x02\x8a\x5b\xf4\x45\xe6\x9c\x76\x4d\x12\xcf\x2a\x99\xff\x94\x35\x18\x6d\x34\x29\x08\x68\xb7\x5d\xad\xb0\x42\xfb\xd4\xa6\xdf\x8f\xf9\x51\x56\x72\xc8\x7e\xfa\xfd\xc0\xf6\x81\x56\xb8\xcb\x29\xae\x3d\xe2\x6e\xbf\x4b\x4d\x8d\xb4\x91\x5c\x06\xfa\x20\x1e\x4b\x21\x62\xc7\x1b\xbf\x51\xe0\x44\x6b\xec\x5c\xb5\x44\xac\xf1\x3e\x23\xb8\xd8\xbe\x7b\x96\x37\x00\xf0\x24\xd5\x14\xfe\x90\xd2\x3d\xc8\x55\xa2\xfc\x5a\x3e\x37\x42\x92\xe4\x01\xcc\xe2\x10\x50\x12\xa3\x3e\xe8\x67\x99\x7e\x9e\x06\x4e\xd2\x6b\x7b\x08\xf6\xed\xdf\x47\x34\xc7\x44\x0b\x62\xee\x0a\xe5\x0b\xc2\x85\xbf\x4a\x6b\xf5\x18\xb1\xf1\xab\x37\xff\x80\xfc\xec\x1f\xf2\x55\x41\xcf\x4b\xf0\xd4\x0b\x00\xf0\x3e\x9b\x0c\xb6\x0f\x08\x73\x2a\x42\xf2\xd9\xe0\x0f\xac\x05\xea\x3b\xaf\xe7\xe0\xb3\x81\xc5\x9d\xc9\x5a\x42\xa2\x87\xd2\x54\xa4\x9a\xcc\x81\xfe\x15\x85\x42\x5f\xc9\x57\xc6\x72\x23\xda\x47\x9a\xa3\xd0\x76\x63\x14\xfc\x26\x66\xf7\xe7\xcb\x5f\xc8\x7a\xec\x01\x74\x52\x1d\x85\x16\x3f\x1b\xc4\xf0\xa8\x3e\x24\x43\x55\x14\xac\xad\xb4\x66\x05\xbd\xb8\xe1\xf2\x2d\x7c\xcb\xbc\x3a\x08\x70\x68\x84\xd3\x74\x04\x52\xe0\x00\xf3\xbc\xab\x49\x19\xa5\x21\x90\x1b\x14\xe5\x4d\x7b\x69\x36\x37\xb1\x8e\xd8\x1b\x08\x5b\x64\xff\x65\x60\xbd\x15\x0d\x63\x7c\x10\x95\xb0\x60\xd7\x95\x3e\x39\x13\xc2\x2d\xde\xbf\x47\x7b\x7a\x60\x2b\x93\x3c\x9c\xc7\xf4\xca\x02\x31\x12\xd1\x7a\x8f\xf4\xff\xa9\x7e\xed\x8b\x31\xe5\x57\x85\x01\xa0\x3a\x59\xdf\x8f\xdf\x0d\x64\xd3\xc0\x73\xf3\x74\xd4\x8e\xa0\x27\x6b\xba\xfc\xc3\x7f\xb4\x6f\xfd\x81\x77\xda\x61\x48\x7a\x1c\x35\xe3\x9d\x71\xc9\x48\x98\x7f\xf9\xd2\xdb\x64\x65\xbf\xa8\x05\x61\x08\xdf\x30\x23\x1f\x93\xf9\x44\x10\x00\xa2\xe4\x90\xb3\xbb\x5e\x45\x9f\xcf\x22\x90\x75\x5d\x53\x0a\x39\x8f\x9e\x99\x49\xf7\x85\x51\x82\xb1\x64\xfb\x2d\xcc\x0e\x8b\xe6\x7c\xdc\xc7\x16\x5f\xcb\xb1\xb1\x4b\xbc\x8e\x4d\x87\x49\xdb\xee\xf2\x7c\xfe\x28\x66\x7c\x6c\x90\xf9\xe7\x7f\x43\x59\xb1\x6a\x65\x70\x35\xcb\xbc\x34\xc4\xee\x1c\x83\x2d\x97\x40\xaf\x4a\xf1\x9b\xdd\x2d\x19\x78\x74\x7a\x3a\xe6\x53\xb9\x
74\x56\x69\x1a\x01\xad\xe5\x3d\x0e\xc0\xe2\x88\x5d\xed\x29\x4e\xe8\x83\x63\xc9\x88\xda\xc4\x2c\xa5\xc5\x7f\x26\xae\xee\x11\x71\xae\x8e\x71\xfc\x07\x4c\xc4\x84\x5c\xb1\x19\x9b\x0c\x07\x2d\x43\xb3\x71\xa1\x76\x8e\xe9\x94\x52\xc0\x23\xcb\x33\xa8\x94\xed\x87\x2f\x10\x1e\x99\xfc\x0a\xc4\x00\x0f\x1d\x05\x81\xa0\x9f\x95\x5a\x23\x52\x07\xf5\xa6\xd5\x45\x84\x5e\x09\x89\xcf\xab\x92\x7f\x6c\x25\xcd\x7b\xe3\xce\xfb\x5f\x58\x2b\x10\x90\xef\x50\xf7\x3e\x5a\xec\x12\xf5\x42\x22\x4c\x9a\x4e\x0d\x80\x39\x8f\xd7\x2c\xde\x9a\x1e\xb1\xb2\xd7\x0c\x51\x25\xdb\x87\x00\xf2\x6b\x90\x2b\x70\x52\x85\x8c\x04\x91\x77\xbb\x12\xc4\x02\xfc\x2c\xa1\x2b\x10\x43\x27\x29\xe2\xdf\xe6\xb7\xb5\x4a\xfc\x41\x19\x75\xfe\xcc\xaf\xb2\xb0\x9d\x8f\xaf\x4a\x51\x6c\xfe\x4f\x4a\x99\x15\x68\x29\xdf\x0e\x91\x69\xce\xdb\xe0\x2b\xa4\x35\x51\x5d\x1c\x05\x44\x25\x58\xb4\x56\x22\x26\xd5\x12\x32\xa9\x59\x87\x2f\x97\xb6\x24\xab\x11\x4b\x06\xcc\x02\xb8\x2c\x9c\x47\xd2\xd2\xcb\x6a\x0e\x9c\x4d\x1f\x9a\x2d\x6e\x76\x06\xe1\x2b\x43\xa5\x45\xd9\xbf\x05\xd1\x4d\xff\x14\x1b\x3c\xa1\xf9\x39\x84\x8e\xb9\xed\x26\xae\xad\xa1\x95\xdf\x6b\x4b\x6b\x36\x4b\xf7\xb1\x39\x60\x3a\x17\x51\x24\x96\xb3\xe4\x94\x7c\x7d\x70\x80\x53\x0c\x81\x4e\xa9\xc4\xc1\x11\x31\xf9\x53\xe0\x87\xc0\x44\x0b\xc9\x64\x77\x7e\x8b\xf5\x38\x9b\x8d\xfb\xc9\x72\x59\x0e\x1b\x70\xee\x2c\x1b\xff\x64\x88\x22\x50\xab\xaa\x20\x34\x51\x2e\x1b\xa0\x2b\xbd\x5d\x05\x74\x2c\xe3\xbf\xae\xea\x7a\xae\x14\xb7\x81\xe9\x9b\x46\x77\x85\x20\x7a\x7b\x8f\xd4\xa9\xc3\xb2\x83\xb9\x8e\x42\xe3\x41\xaa\x3e\x44\x6a\x3a\xa7\xf5\x16\x4c\x4b\xe1\x4e\x0f\xea\x70\xa8\x84\x3f\x3f\x38\xda\x36\x7c\xdb\xae\x44\x9a\xdc\x1f\xb7\x55\x0f\x2a\x61\x25\x6f\xf7\x49\x41\x84\xe3\x46\x55\x25\xec\xbc\x4a\xa8\xf3\x2e\x44\x8f\x50\x51\xeb\x08\x10\xc4\x59\x3f\x37\x15\x52\x50\xc1\x65\x33\xfc\xf1\x02\x05\x2a\x59\x19\x21\x64\x93\x3b\x7c\x4a\x70\x86\xd3\x48\xf3\x6e\xf5\xc1\xd9\xd0\x21\x32\xf8\x29\x2c\x62\x8f\x7a\x64\x75\x22\x81\x2d\xe8\xb9\xde\xd7\x7f\xaf\x41\xe3\x04\x85\x5b\x7a\x2d\x93\xab\x89\x79\x09\xaf\xf7\xe3\xbe\xa0\xbb\x
56\xbf\xc5\xe2\xb3\x3d\x2e\xfb\x46\x7b\x3c\x45\x27\x09\x4d\x36\xd1\xf7\x17\xf2\xe4\x5f\x95\x34\x28\x0b\x29\x56\x56\x10\x65\x54\x04\xac\x3b\x2f\xf2\x56\x9f\xb6\x9e\x05\x3e\x24\x86\x8e\xf8\xd1\x69\x43\x20\x71\x0e\x67\xfe\x17\x38\x49\x2b\xff\xa0\x1d\xd4\x66\x3c\x58\x3c\x93\xae\x3a\x3b\xce\x45\xb2\x19\x73\xd7\x8c\xbe\x35\x81\x1a\xbd\x79\xea\xcd\x78\xdf\x0d\xc8\xf2\xcb\x95\x56\x1a\x2d\xe7\xcf\xd8\x19\xd0\x74\xe6\x92\x22\x27\x7e\x3e\x56\xfd\xcc\xf3\x81\x0e\x8e\xf4\x60\xc0\x16\x67\xee\xa5\xad\x42\x08\x46\x03\x80\x95\x75\x21\x38\xa7\x30\xae\x6a\xa8\x80\x36\x4e\xc9\x4f\x59\x43\xb0\x6f\x80\x2d\x16\x59\x0b\xeb\x14\xf7\xc8\x23\xaf\x42\x7b\xb6\xdc\x5c\xac\x4e\x17\x6d\x9c\x21\xfe\x7e\xd6\x0d\x8f\x62\xdb\x93\xca\x1c\xf2\x52\x01\xc4\xad\x71\x87\x19\x51\x7a\x24\x3d\x27\xf4\xda\x0b\xbc\x7a\xd5\xdb\x35\x32\xfe\xeb\x23\x5c\xe5\xc3\x41\x29\x77\x8d\xbd\xc1\x4c\xa0\xd3\xb1\xc2\xe8\x9b\xf3\x42\x79\x85\x8f\x79\x88\x80\xdf\x1c\x28\xf6\xaf\x61\x14\xde\x75\x5d\x63\x5d\x04\x3b\x4d\x8c\xb1\x7c\x97\x90\x29\x21\x7a\xa2\x44\x51\x19\x00\x00\x14\xcb\x02\xf7\x8a\xcd\x73\xe7\xee\x5a\x10\xdb\x06\x94\xb6\x8e\x9c\x21\x4d\x83\x06\x96\xbd\x9f\x54\x93\xed\x34\x3b\xdc\x43\x8d\x2f\xf4\xb7\x69\x39\x8b\x0b\xc8\x17\x73\x0f\x17\x44\x1f\xd4\x61\x42\x60\xf9\x74\xc1\x8e\xe8\x07\x23\x24\xc1\x78\xa8\xc3\x6f\x48\x47\x6b\x9e\xa6\x0d\x65\x86\x7b\xe8\x81\x01\x7a\x6c\x17\x8e\x1a\x4c\x9a\xb2\xf4\xff\xf0\xcd\xd5\x78\x16\x49\x43\x87\x75\x75\x69\x6f\x10\xb5\x22\x9a\x7e\x15\x29\x56\xf9\xed\xee\x2b\xf6\x44\xf9\x22\x2a\x8d\xc8\xc0\xe5\xf0\xc5\xa9\x64\x75\xca\x0a\x16\xc5\x3e\xa8\xde\x12\xf0\xea\x35\xd9\xc7\xec\xa9\x77\x01\xb6\x77\x5d\xdc\xc5\xa9\x39\x61\xb1\x4c\xc2\x33\xbf\xa3\xce\xfe\xe2\xd0\xd2\x3c\x23\xe5\x84\x71\xe9\xee\x63\x80\x0f\xb7\x61\xa4\x4f\xe3\x5d\xf6\x83\xc7\xef\x94\xb2\x52\x60\x05\x64\x59\x6e\xe3\x6a\xfa\x1b\xdc\x60\xd8\xd6\x58\x8d\x1a\xff\x7d\x0b\x92\xcb\xa1\xc5\x34\x4f\x6e\x6b\xbc\x51\x67\x2c\x71\x2a\x68\x31\xee\x47\x4d\xbf\x3c\x4b\x63\xb2\xd6\xff\xea\x17\x42\x59\xfd\xe0\x37\x67\xa2\x70\xd3\xc8\xd5\x49\x90\xbd\xd8\x
a4\x36\xfd\xeb\x49\x43\x40\x23\x07\x57\x52\xed\x59\x10\x52\x77\x2a\x22\xfc\xcf\xba\xbe\x41\x97\x93\xdc\x01\x40\x55\xba\xaa\x4e\x0d\x49\x53\x63\xae\x7e\xa8\xad\xe1\xdf\x34\x1d\xbc\x94\xcc\xe6\xa6\x96\x2e\xa9\x92\x52\x8f\xda\x80\x7c\x07\xda\x9d\x4d\x72\xe3\x56\xef\xf1\x45\x7c\xae\x8f\x68\xcb\xe6\xf9\x8d\xd6\xe0\xae\x30\x31\x2a\x06\x64\x6c\xbf\x5c\x47\x97\xad\x98\x87\x7c\x35\x75\x35\x12\x1a\xe1\x3b\xee\x7e\xfa\x17\x74\xf6\xfb\xdd\xb2\x49\x45\xcd\x0d\xd0\x07\x99\x47\xc4\xac\xa0\x4a\xd7\x3c\x18\xea\x18\xf9\x42\xa1\xde\x0e\x76\xe9\x17\x3c\x0e\xb5\x2d\xf5\x23\x88\xa0\xd9\x93\x35\xbf\x79\x6a\xa7\xb7\x92\xe1\x13\x53\xba\x34\x0c\x4f\x18\x2e\x53\x84\x9b\xd7\xd8\x59\xe0\x8e\x48\x54\x73\x36\x12\x76\x68\x9d\x8b\x45\xcd\x9b\xab\x15\x92\x74\x6b\x50\xcb\xf1\x11\xde\x5a\x56\x77\xed\x23\xb4\x2c\xaf\x18\xe7\x4c\x65\xe1\x86\x63\x5c\x18\x4a\x48\x2c\x34\x32\xdf\xd6\xa6\x57\x08\x7a\x2f\x4d\xb7\xeb\xdf\x8c\xb8\x46\x5a\x7c\x2a\xe3\x23\x2b\x40\xdb\x6a\xa6\xa4\xbf\xc6\x6f\x2a\x36\x5c\x71\x9d\x56\x07\x34\x52\x9d\x34\x83\xce\xa2\xa0\x54\x6f\x2d\x7a\x0a\x9c\x97\x46\xc0\x8d\xe1\x95\x37\x74\xff\x10\xb4\xa5\xe5\xe4\x48\x5e\x1d\x90\xc2\x63\x42\x86\x83\x85\xd6\x1c\xda\xd6\x63\x42\x7a\xaa\x86\x0b\x2c\xef\x77\xb3\x4e\x5e\xa7\x3f\x6c\x1f\xce\xc9\xc4\xb0\x8e\xed\xa9\xbf\x04\x5a\x1c\x74\x10\xcc\x6a\x76\x4d\x97\x31\x56\xa2\x14\x36\xfb\xcf\x03\x78\xa1\xc3\xda\x82\x1b\x45\xc0\x85\x2c\xa1\x58\x36\x73\xc1\xdb\x0e\x2a\xa0\x30\xba\x48\x44\x40\xe7\xc5\x20\x0d\x29\xac\x02\xf2\xbf\x41\x77\xaa\xa5\xe6\x5c\x9e\xe8\x8a\x21\x30\x44\x4c\x2e\xb5\x2a\xed\xdb\xc5\xcc\xad\xed\x1a\xcb\x30\x79\x48\xa8\x98\xa3\x21\x09\xd9\x98\xdc\x53\x9c\xcc\x79\xcb\xe1\x8b\x4c\x28\x0c\xf9\x26\xb7\x50\x5d\x07\x59\x8f\x9d\x6a\xed\x36\xd0\xfd\x07\x19\x0b\x10\x82\xbb\x15\x53\x98\x60\x74\x35\xa0\x2b\x5a\x4b\x4c\x4c\x6e\xeb\x7e\x04\x2b\xbf\x63\x9e\x44\xa7\xfd\x3a\xda\xfa\x8d\x5c\x04\x87\x0a\x9c\x31\xeb\x59\x01\xf0\xc6\x81\xbd\x8b\x44\xdb\x91\x98\xd7\xb3\xda\x55\x64\x55\x13\xe3\x6f\xb5\x4d\x4d\xfa\xb8\x14\x1d\x58\xb6\x97\x02\x8a\x69\xa0\xe6\xb8\xca\x
d3\x8b\x1e\xb9\xb2\xf4\xb5\xae\xa4\x2a\x0b\x1f\x1c\xb7\x27\x40\x31\xcf\x02\x3e\x47\x38\xb0\xd7\x49\xdb\x48\x61\x7d\x8d\x1e\x99\x84\x93\x1b\x7a\xe7\xaa\x87\xfa\x22\xec\xd3\x54\xae\x82\x64\x92\xc9\x05\x55\x16\xc6\x9c\x54\xc4\xcf\x08\xa2\x9a\x50\x90\xfe\x7e\x74\xf2\xdc\x21\x31\xd4\x2a\x52\xf7\x94\x21\xbf\x46\xb1\x83\x2e\x83\x0c\x43\xc0\x91\xc7\xc5\xee\xd3\xbc\x22\x34\xdf\xa6\x43\x1c\x8a\x3d\x10\x79\x53\xe3\xbc\x6f\xa3\x2f\x27\xf9\x86\x93\xbc\xd9\x0b\x5d\xb7\xd2\x9c\x86\xeb\x7a\xe0\x2b\x53\xd8\x22\x49\xd5\x81\x7f\x2f\x85\x5e\x4d\x92\xa8\x44\xbe\x66\x47\xcb\x2d\xa6\x88\x79\x12\x20\x6f\xd9\xaf\xe3\x96\x9b\x4e\x13\xaf\x99\x18\xb1\x13\xe3\x36\x48\x37\x7d\xcf\x45\xfb\x9a\x3d\x3d\x19\x4e\x78\xf0\x10\xfb\x80\x7e\x90\x09\x26\xe8\xcb\x8a\xa3\x06\xcb\x8a\x93\x64\x01\xd3\x45\x58\x1c\xb5\x69\x93\xd7\x44\x2a\x77\x00\x3c\xaf\xac\xc2\x96\x76\xba\x24\x06\xea\xfe\x04\xf5\x41\x20\x8a\x4d\x69\x60\x00\xf8\x36\xfb\xa4\xca\x56\x15\xb9\x01\x45\x5b\xfd\x55\xc7\x73\xd3\xdf\x03\x59\x87\xdb\x7b\xc2\x51\x2d\xac\x79\xc0\x48\x27\xf9\xe9\x1f\xcc\x3d\x51\xba\x91\x3c\xf6\x82\x77\x35\xc2\xee\x71\xdb\x06\x60\x2b\xb1\xea\x4b\x80\xb0\x72\x3a\x87\x33\x79\x9c\xa9\x06\x47\x97\xf3\xa5\x99\x57\x23\x31\xa4\x76\x8f\xdc\x90\x36\xee\xed\xdb\x09\xf9\xe7\x7f\x2d\xf6\xb3\xe3\x09\x11\x38\xe1\x67\x44\x8e\x01\x73\x6f\x9f\xc7\x82\x71\xa2\x4f\xc6\x6e\x47\x26\xed\x32\x2a\xbc\x6b\xb8\x1f\xe3\x3f\x88\xcf\xbb\xde\xf9\x37\xdf\x8c\x42\xda\xe5\xb4\xb5\xa7\x25\x3a\x9a\x76\x00\xa5\x33\x41\x9a\xfb\xf7\xe7\x15\x24\x9f\x49\xfa\xb2\x69\x19\xd6\xcf\xe2\x4f\xa8\x3f\x31\xd9\x9d\x5f\x2d\xc3\xa5\x68\x7a\xf1\x39\xf1\x05\x2e\x35\x43\x53\x65\x13\x87\xca\x77\x2c\xa9\xf3\xcc\x3f\xf4\xc0\x16\x33\xca\x33\xb3\x9b\xa0\xaf\x35\x26\xce\x17\xdb\x19\x91\xc8\xe7\x04\xed\x71\xae\xb9\x56\x69\x94\x2e\xea\x56\xbb\x39\xe5\x83\x49\x78\xf9\x1f\xbb\x8c\x9e\xf2\x58\xaa\x66\x27\x22\x3b\x9d\xd1\x7d\x5a\x40\x6e\x25\x9a\x8a\x94\x6f\x6b\x9b\x85\x6c\x15\x90\xcc\x46\xb8\x83\xc9\x3a\xdc\x06\xcc\x48\x7b\x6c\x77\xd8\x3d\x97\x19\x55\x67\xb4\x66\x3e\x1b\x25\xb2\x65\x18\x06\x
7f\x42\xc3\xa9\xa7\x46\x90\x1a\x93\xf4\x96\xf4\x44\x54\x80\x9a\x8c\xc5\x50\x9e\x32\x77\x1d\x7c\x7b\xc7\x9f\x73\x74\x74\x26\x66\x0a\x9a\xf0\x8d\x28\x2f\x7a\xef\x3e\x6d\x8a\xab\xb0\x21\x1d\xef\x65\xb7\x69\x1a\x6f\x4a\x1f\x7d\xb7\x43\x08\x59\x60\xba\xc3\x22\x28\x8d\x4d\xaa\xd8\xbd\x05\xe7\xf2\x5b\x08\xa2\xb5\xba\x71\xb3\x4b\x15\xa7\x51\x5b\x73\x68\x65\x9c\xe6\x46\xdd\xc3\x98\x3f\x23\xf0\xe0\xd1\xbf\x84\x34\xc6\x3f\x58\xd9\xf0\xc4\xb1\xcf\x84\x99\x95\x61\x91\xeb\x9a\xb6\x97\x5b\x43\xb0\xb5\xef\x61\xc6\x6b\xe0\x83\xc1\x94\x1d\xd6\xdf\x33\x44\xfe\x51\x81\x63\x9b\xf2\x90\x2a\x9b\xfc\xca\x81\x6e\xea\x7e\x29\x66\x34\x6b\x8e\xed\xe1\x43\xe3\xc7\xa1\x1a\x31\xf6\xf0\xc0\x79\x2b\xf9\x74\x31\xca\x5a\xb6\xa3\xb3\xfa\x2f\xb6\xf9\x17\x4c\x63\x45\xff\x96\x2a\xa2\x4e\x92\x54\xb7\xe9\x57\xec\x19\xe8\xfd\xe1\x96\x37\xdc\xa6\xd8\x64\x7d\xd7\xfe\x9c\x17\xba\xc3\xc9\x91\x18\xb5\x0d\x37\x44\xc7\x96\x38\xb2\x6b\x96\x81\x21\x8f\xdb\xb0\x1c\xf8\x49\x15\xba\x5c\xfc\xa8\x41\xa7\x76\x2c\xb6\x70\x6a\x4f\x17\x94\xc7\x4c\x85\x23\xe0\x0f\xe3\xe1\x41\xf4\x96\x2f\x77\x75\x61\xf1\x7f\x24\xb6\xef\x25\x28\x8c\x11\xc7\xf4\x5e\xf2\x8f\x2b\xe2\xe2\xba\x5e\xb1\x30\xaf\x23\x1e\x66\xd7\x95\x54\x5f\x33\xc9\x75\xe0\x63\x76\x84\x77\xdf\xca\x8d\x22\x2e\xb8\x68\x4d\x49\x28\xe5\xbf\x12\xb3\x3f\xca\x06\x4e\xd5\x0b\x1f\xfe\x02\xb6\x08\x51\xe3\x64\x0e\xc0\x77\x68\xdc\xd2\xef\x7f\xee\xce\x25\xd5\x32\x77\x8c\xf4\x06\x0e\x7f\xdb\x86\xc0\x7f\x33\xcc\x63\x11\x8f\xd5\xe8\xc8\x2d\x21\x34\xbe\x18\x93\x19\xce\x0d\xb9\xac\x53\x05\x17\x98\x2d\x04\xff\xd0\x63\xb4\x77\xce\xc2\x58\x87\xf5\x90\x01\x5a\xf9\xcb\x99\x10\x79\x0e\x4c\x12\x92\x0c\xdd\x0c\x98\x66\x29\xde\x8b\x67\x0e\x3b\x9a\x6e\x1d\xe2\x9b\x1e\xab\x00\x7d\x32\xd0\xd6\x37\xbb\x8a\xe4\x31\x5a\x51\xa6\x97\x8f\xdf\x45\x0b\x5b\xef\x93\x7b\x64\x16\x3c\x33\x40\xe2\xae\x73\xe7\x56\x5a\x13\x30\x52\xe9\x0f\x73\x1c\x05\xf4\x97\x81\x57\x86\x8b\x51\x04\x51\x75\x50\x63\x27\x0a\x37\xe6\xb4\x74\x87\xac\x56\xe2\x44\x71\x14\xbb\x56\x1a\x9f\xd7\x8a\x7e\xfe\x77\x52\xbf\x11\xbb\x58\x39\x27\x69\x
1b\xfe\x21\x1a\xe7\x0e\x71\xcd\x3e\x6b\xd5\x4d\x24\xc5\xc3\xcc\x38\x02\xb2\xe7\x10\xdb\xa1\xee\x74\x5f\xbd\xcf\x69\xd7\x4c\x0b\xd2\xea\x80\x01\xf6\x1f\x45\xed\x3b\x19\x85\x32\x88\x9d\xd5\xec\x86\xe8\x44\x33\xa2\xe7\x0d\x2e\x2b\xbe\x81\x89\x4c\x9c\x97\x45\x0f\xb0\x7f\x6b\xba\xc4\xf1\x40\xbb\xfb\x50\xff\x49\x46\x69\xbb\x26\x96\x35\xc4\xa0\x83\xc5\x4f\x73\x96\xc6\x73\x11\xfd\xf0\xcf\xd8\xeb\x75\xf5\x29\x58\x8c\xf0\xfb\xd0\x8e\xb4\xe1\x90\x5b\xdb\x66\x2e\xbc\xad\x9b\xf4\x8d\xbe\xc0\x66\x76\xe2\x06\x5f\x19\x04\x3a\xf1\xbd\xdf\xfa\x74\x30\x9b\x5b\x59\xc6\x3a\x42\xa1\x4d\xe4\xe5\x2c\x78\x8a\x7c\x83\x61\x5d\xf7\x85\xa6\x03\x1a\x19\x61\x5e\x01\x1f\xf5\x4d\x44\x78\xa3\x17\x4a\x20\xb3\x8d\xf6\xf8\x0b\xd2\xe7\xed\x0d\x0a\xfa\xd3\x38\x48\x47\xd2\x96\xd4\xa2\x5b\x34\xe8\xae\x48\x39\x9f\x2a\xa9\x83\xc0\xb8\x3d\xa0\xe0\x09\xa3\x00\x91\xc0\x19\x44\x67\xa3\x5a\x2f\xdc\x72\x58\xa4\xce\x91\xcf\x50\x2e\x4a\xb5\xd8\x72\xf7\x31\x0f\x01\xfc\x6e\x0a\x31\x48\xf8\x8f\xf8\xd7\x4f\x8f\x82\x66\x99\x50\x88\x13\x45\xf9\x42\x2e\x7a\xa5\x25\x1a\x92\xd8\x1d\x4c\x70\x82\x47\xd6\x9b\x7e\xb7\x97\xd8\x6f\x10\xd2\x43\xa1\x39\x8c\x2b\xe2\x60\x34\xd2\x7b\xee\x84\xb6\xbc\x5f\x27\xa2\x38\x0a\x21\xbd\x3d\x78\xd2\xb3\xff\xe3\x2f\x0d\x68\xb7\x91\xb6\xe0\xba\x56\xdc\x78\xb1\xd2\x65\xcd\x15\x44\xaf\x86\xa0\x9b\x5f\xac\xf0\xf5\x26\x4e\x00\x46\x61\x4e\x56\x95\x8b\x0f\xdf\x17\x39\x5d\x60\x41\x40\x9c\xb4\x8f\xd1\x22\x1a\xbb\x12\xe2\x80\xe2\x24\x75\x0f\xc0\xb8\xa6\xfa\x7a\xc0\x19\xbf\x08\x70\xa9\x77\xb3\xf1\x87\x67\x06\xd7\x0c\x33\x9d\x1c\xc3\xc8\x8d\x21\x0b\x6d\xb5\xc6\xfc\xd4\x9b\xc3\xc9\x82\x71\xb2\x7b\xab\x70\x35\x4a\xa6\x78\x4b\xb6\x1e\xf8\xd5\x93\x4e\x3a\xb9\xe9\xe4\xdd\x1e\x76\x73\xa7\x33\xaa\x0b\x4f\x69\x7d\xec\x6a\xc2\x72\x2a\x1d\x8a\x9b\xb2\x9d\x7f\x00\xee\x4f\x03\x1f\x2f\xc7\x0f\x78\x69\xfe\xab\x83\x54\x38\x0f\x21\xdc\x67\x3e\xc5\x2e\x15\x0c\xae\x10\x1a\xfa\x7d\xf7\x49\xf0\x11\x7e\x26\x79\xa1\x0c\x25\xf5\x2a\x3e\x48\x12\x94\x93\x34\x42\xb4\xdd\xa8\xd8\xf2\x04\x8e\x6b\xb4\x10\x86\xbb\x74\x77\x2b\xf6\x8a\x59\x
32\xd9\xc4\xf0\x6f\x52\xaf\x4e\x62\x24\x27\x39\x2a\x55\xa9\x33\xf6\x70\x5e\xbe\x89\x08\x92\xc3\x3d\x42\x2a\x88\xaa\x96\xe5\x55\xc0\x2a\xf4\x76\x05\xed\x96\x75\x8c\x28\xc6\xd8\x5c\x0d\x53\xd6\x24\x59\x55\xa4\x8f\x0b\x26\x7c\x11\x42\xce\x23\x15\xae\x0d\x48\x9d\x41\x56\x1a\x55\xdf\x3a\x4a\x72\xea\x6a\x7c\x8f\xb8\x2d\xe5\x03\xce\xca\xee\xa5\xa7\xd9\x35\x33\xaa\x40\x45\xdd\x14\xe0\xc4\x94\x7a\xca\xb5\xcd\xbf\xf0\xa5\x4f\x29\x08\xe7\x02\x7e\x4e\x3a\x55\x09\x3a\x9d\x95\xfb\xb2\xd1\x6d\xb4\x1b\xb5\x4c\xaa\xf7\xde\xb1\xc7\xc5\xb5\x6f\xb5\xf7\x75\x78\x39\x91\xbc\xe9\xb3\x95\x2a\x6c\xb9\x82\xcf\x8a\x90\xf3\xd2\xbe\xcb\xd7\xf3\x05\xb9\x57\x51\x4a\xe3\xfb\xf0\xad\x1a\xbb\xd5\x78\x8c\xb4\xec\xfd\x52\x8d', 2)
| 13,587.666667
| 40,703
| 0.750141
| 10,178
| 40,763
| 3.002948
| 0.026037
| 0.003926
| 0.003828
| 0.003141
| 0.001276
| 0.000785
| 0.000785
| 0
| 0
| 0
| 0
| 0.313916
| 0.000245
| 40,763
| 3
| 40,703
| 13,587.666667
| 0.436066
| 0
| 0
| 0
| 0
| 0.333333
| 0.997596
| 0.997596
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
0584a506216c6549c1b0b73d71103b00c028ba31
| 38,953
|
py
|
Python
|
datahub_client/apis/dataset_api.py
|
amkimian/mimir_python
|
994c1542437fa6bd1d0e53b0c0c4c8f692575374
|
[
"Apache-2.0"
] | null | null | null |
datahub_client/apis/dataset_api.py
|
amkimian/mimir_python
|
994c1542437fa6bd1d0e53b0c0c4c8f692575374
|
[
"Apache-2.0"
] | null | null | null |
datahub_client/apis/dataset_api.py
|
amkimian/mimir_python
|
994c1542437fa6bd1d0e53b0c0c4c8f692575374
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
DataHub API
DataHub API
OpenAPI spec version: 0.0.11
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class DatasetApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """
    Bind this API to an ApiClient.

    :param api_client: explicit client to use for all requests.  When
        omitted (or falsy), the shared client held by the global
        Configuration is used, creating one lazily if none exists yet.
    """
    config = Configuration()
    if api_client:
        self.api_client = api_client
        return
    # Fall back to the configuration-wide client, creating it on demand.
    if not config.api_client:
        config.api_client = ApiClient()
    self.api_client = config.api_client
def add_data_set(self, user_id, body, **kwargs):
    """
    Create a new data set, associated with the given user id

    This creates a new data set that can then be added to.
    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The id of the user that this dataset is associated with (required)
    :param DataSet body: DataSet object that defines the element (required)
    :param str api_key: The user api key
    :return: GeneralStatus
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always unwrap the HTTP envelope: callers get just the payload,
    # never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same implementation;
    # with a callback it returns the request thread instead of data.
    return self.add_data_set_with_http_info(user_id, body, **kwargs)
def add_data_set_with_http_info(self, user_id, body, **kwargs):
    """
    Create a new data set, associated with the given user id

    This creates a new data set that can then be added to.
    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The id of the user that this dataset is associated with (required)
    :param DataSet body: DataSet object that defines the element (required)
    :param str api_key: The user api key
    :return: GeneralStatus
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('user_id', 'body', 'api_key',
                'callback', '_return_http_data_only')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_data_set" % name
            )
    params = dict(kwargs, user_id=user_id, body=body)

    # Both positional arguments are mandatory and must be non-None.
    if params['user_id'] is None:
        raise ValueError("Missing the required parameter `user_id` when calling `add_data_set`")
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `add_data_set`")

    resource_path = '/datasets/{userId}'.replace('{format}', 'json')
    path_params = {'userId': params['user_id']}
    query_params = {}

    header_params = {}
    if 'api_key' in params:
        header_params['api_key'] = params['api_key']
    # Negotiate Accept; drop the header entirely when nothing matched.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    # No endpoint-specific authentication is configured.
    return self.api_client.call_api(
        resource_path, 'POST',
        path_params,
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='GeneralStatus',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def delete_data_set(self, user_id, data_set, **kwargs):
    """
    Remove a data set and all releases and elements

    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The id of the user owning this dataset (required)
    :param str data_set: The id of the dataset (required)
    :param str api_key: The user api key
    :return: GeneralStatus
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always unwrap the HTTP envelope: callers get just the payload.
    kwargs['_return_http_data_only'] = True
    # Delegation is identical for sync and async; with a callback the
    # implementation returns the request thread instead of data.
    return self.delete_data_set_with_http_info(user_id, data_set, **kwargs)
def delete_data_set_with_http_info(self, user_id, data_set, **kwargs):
    """
    Remove a data set and all releases and elements

    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The id of the user owning this dataset (required)
    :param str data_set: The id of the dataset (required)
    :param str api_key: The user api key
    :return: GeneralStatus
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('user_id', 'data_set', 'api_key',
                'callback', '_return_http_data_only')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_data_set" % name
            )
    params = dict(kwargs, user_id=user_id, data_set=data_set)

    # Both positional arguments are mandatory and must be non-None.
    if params['user_id'] is None:
        raise ValueError("Missing the required parameter `user_id` when calling `delete_data_set`")
    if params['data_set'] is None:
        raise ValueError("Missing the required parameter `data_set` when calling `delete_data_set`")

    resource_path = '/datasets/{userId}/{dataSet}'.replace('{format}', 'json')
    path_params = {
        'userId': params['user_id'],
        'dataSet': params['data_set'],
    }
    query_params = {}

    header_params = {}
    if 'api_key' in params:
        header_params['api_key'] = params['api_key']
    # Negotiate Accept; drop the header entirely when nothing matched.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/xml'])
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        [])

    # DELETE carries no request body; no endpoint auth is configured.
    return self.api_client.call_api(
        resource_path, 'DELETE',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='GeneralStatus',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def find_data_sets_by_tags(self, **kwargs):
    """
    Find marketplace data sets filtered by tags.

    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str api_key: The user api key
    :param list[str] tags: Tags to filter by
    :param int page: Page to return (defaults to zero)
    :return: list[DataSet]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always unwrap the HTTP envelope: callers get just the payload.
    kwargs['_return_http_data_only'] = True
    # Same delegate either way; with a callback it yields the thread.
    return self.find_data_sets_by_tags_with_http_info(**kwargs)
def find_data_sets_by_tags_with_http_info(self, **kwargs):
    """
    Find marketplace data sets filtered by tags.

    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str api_key: The user api key
    :param list[str] tags: Tags to filter by
    :param int page: Page to return (defaults to zero)
    :return: list[DataSet]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('api_key', 'tags', 'page',
                'callback', '_return_http_data_only')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_data_sets_by_tags" % name
            )
    params = dict(kwargs)

    resource_path = '/marketplace/getByTag'.replace('{format}', 'json')
    path_params = {}

    # Optional filters are forwarded only when supplied.
    query_params = {}
    if 'tags' in params:
        query_params['tags'] = params['tags']
    if 'page' in params:
        query_params['page'] = params['page']

    header_params = {}
    if 'api_key' in params:
        header_params['api_key'] = params['api_key']
    # Negotiate Accept; drop the header entirely when nothing matched.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        [])

    # GET carries no request body; no endpoint auth is configured.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[DataSet]',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def find_user_data_sets(self, api_key, **kwargs):
    """
    Find the data sets belonging to (or subscribed to by) a user.

    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str api_key: The user api key (required)
    :param list[str] tags: Tags to filter by
    :param int page: Page to return (defaults to zero)
    :param bool subscribed: If true, also return subscribed data sets
    :return: list[DataSet]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always unwrap the HTTP envelope: callers get just the payload.
    kwargs['_return_http_data_only'] = True
    # Same delegate either way; with a callback it yields the thread.
    return self.find_user_data_sets_with_http_info(api_key, **kwargs)
def find_user_data_sets_with_http_info(self, api_key, **kwargs):
    """
    Find the data sets belonging to (or subscribed to by) a user.

    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str api_key: The user api key (required)
    :param list[str] tags: Tags to filter by
    :param int page: Page to return (defaults to zero)
    :param bool subscribed: If true, also return subscribed data sets
    :return: list[DataSet]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('api_key', 'tags', 'page', 'subscribed',
                'callback', '_return_http_data_only')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_user_data_sets" % name
            )
    params = dict(kwargs, api_key=api_key)

    # The api_key is mandatory and must be non-None.
    if params['api_key'] is None:
        raise ValueError("Missing the required parameter `api_key` when calling `find_user_data_sets`")

    resource_path = '/user/getDataSets'.replace('{format}', 'json')
    path_params = {}

    # Optional filters are forwarded only when supplied.
    query_params = {}
    if 'tags' in params:
        query_params['tags'] = params['tags']
    if 'page' in params:
        query_params['page'] = params['page']
    if 'subscribed' in params:
        query_params['subscribed'] = params['subscribed']

    # api_key is always present here (validated above).
    header_params = {'api_key': params['api_key']}
    # Negotiate Accept; drop the header entirely when nothing matched.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        [])

    # GET carries no request body; no endpoint auth is configured.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[DataSet]',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def get_data_set_by_id(self, user_id, data_set, **kwargs):
    """
    Find a dataset for a user and a dataset

    Returns a data set.
    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The id of the user owning this dataset (required)
    :param str data_set: The id of the dataset (required)
    :param str api_key: The user api key
    :return: DataSet
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always unwrap the HTTP envelope: callers get just the payload.
    kwargs['_return_http_data_only'] = True
    # Same delegate either way; with a callback it yields the thread.
    return self.get_data_set_by_id_with_http_info(user_id, data_set, **kwargs)
def get_data_set_by_id_with_http_info(self, user_id, data_set, **kwargs):
    """
    Find a dataset for a user and a dataset

    Returns a data set.
    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The id of the user owning this dataset (required)
    :param str data_set: The id of the dataset (required)
    :param str api_key: The user api key
    :return: DataSet
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('user_id', 'data_set', 'api_key',
                'callback', '_return_http_data_only')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_data_set_by_id" % name
            )
    params = dict(kwargs, user_id=user_id, data_set=data_set)

    # Both positional arguments are mandatory and must be non-None.
    if params['user_id'] is None:
        raise ValueError("Missing the required parameter `user_id` when calling `get_data_set_by_id`")
    if params['data_set'] is None:
        raise ValueError("Missing the required parameter `data_set` when calling `get_data_set_by_id`")

    resource_path = '/datasets/{userId}/{dataSet}'.replace('{format}', 'json')
    path_params = {
        'userId': params['user_id'],
        'dataSet': params['data_set'],
    }
    query_params = {}

    header_params = {}
    if 'api_key' in params:
        header_params['api_key'] = params['api_key']
    # Negotiate Accept; drop the header entirely when nothing matched.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        [])

    # GET carries no request body; no endpoint auth is configured.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='DataSet',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def get_front(self, **kwargs):
    """
    Fetch the marketplace front page of data sets.

    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int page: Page to return (defaults to zero)
    :param int limit: The maximum amount of records to be returned (the size of the page)
    :return: list[DataSet]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always unwrap the HTTP envelope: callers get just the payload.
    kwargs['_return_http_data_only'] = True
    # Same delegate either way; with a callback it yields the thread.
    return self.get_front_with_http_info(**kwargs)
def get_front_with_http_info(self, **kwargs):
    """
    Fetch the marketplace front page of data sets.

    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int page: Page to return (defaults to zero)
    :param int limit: The maximum amount of records to be returned (the size of the page)
    :return: list[DataSet]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('page', 'limit', 'callback', '_return_http_data_only')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_front" % name
            )
    params = dict(kwargs)

    resource_path = '/marketplace/getFront'.replace('{format}', 'json')
    path_params = {}

    # Optional paging controls are forwarded only when supplied.
    query_params = {}
    if 'page' in params:
        query_params['page'] = params['page']
    if 'limit' in params:
        query_params['limit'] = params['limit']

    # This endpoint takes no api_key header.
    header_params = {}
    # Negotiate Accept; drop the header entirely when nothing matched.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        [])

    # GET carries no request body; no endpoint auth is configured.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[DataSet]',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'))
def get_my_data_sets(self, api_key, **kwargs):
    """
    Fetch the marketplace data sets owned by the calling user.

    Synchronous by default; pass a `callback` keyword argument to run
    asynchronously, in which case the request thread is returned and
    the callback is invoked with the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str api_key: The user api key (required)
    :param int page: Page to return (defaults to zero)
    :return: list[DataSet]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always unwrap the HTTP envelope: callers get just the payload.
    kwargs['_return_http_data_only'] = True
    # Same delegate either way; with a callback it yields the thread.
    return self.get_my_data_sets_with_http_info(api_key, **kwargs)
def get_my_data_sets_with_http_info(self, api_key, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_my_data_sets_with_http_info(api_key, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str api_key: The user api key (required)
    :param int page: Page to return (defaults to zero)
    :return: list[DataSet]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['api_key', 'page', 'callback', '_return_http_data_only']

    # Merge **kwargs into the locals() snapshot, rejecting anything unknown.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_my_data_sets" % key
            )
        params[key] = val
    del params['kwargs']

    # verify the required parameter 'api_key' is set
    if params.get('api_key') is None:
        raise ValueError("Missing the required parameter `api_key` when calling `get_my_data_sets`")

    resource_path = '/marketplace/getMyDataSets'.replace('{format}', 'json')
    path_params = {}

    # Optional paging goes on the query string.
    query_params = {}
    if 'page' in params:
        query_params['page'] = params['page']

    # The API key is carried as a request header.
    header_params = {}
    if 'api_key' in params:
        header_params['api_key'] = params['api_key']

    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept` — only set when the client negotiates one.
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type([])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[DataSet]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def update_data_set(self, api_key, owner, dataset, body, **kwargs):
    """
    Update an existing data set.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_data_set(api_key, owner, dataset, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str api_key: The user api key (required)
    :param str owner: The id of the user that this dataset is associated with (required)
    :param str dataset: The data set id to update (required)
    :param DataSet body: DataSet object that defines the element (required)
    :return: GeneralStatus
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the deserialized payload only, not the full response triple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous: delegate returns the request thread.
        return self.update_data_set_with_http_info(api_key, owner, dataset, body, **kwargs)
    # Synchronous: unwrap the data and return it.
    data = self.update_data_set_with_http_info(api_key, owner, dataset, body, **kwargs)
    return data
def update_data_set_with_http_info(self, api_key, owner, dataset, body, **kwargs):
    """
    Update an existing data set.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_data_set_with_http_info(api_key, owner, dataset, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str api_key: The user api key (required)
    :param str owner: The id of the user that this dataset is associated with (required)
    :param str dataset: The data set id to update (required)
    :param DataSet body: DataSet object that defines the element (required)
    :return: GeneralStatus
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['api_key', 'owner', 'dataset', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_data_set" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'api_key' is set
    if ('api_key' not in params) or (params['api_key'] is None):
        raise ValueError("Missing the required parameter `api_key` when calling `update_data_set`")
    # verify the required parameter 'owner' is set
    if ('owner' not in params) or (params['owner'] is None):
        raise ValueError("Missing the required parameter `owner` when calling `update_data_set`")
    # verify the required parameter 'dataset' is set
    if ('dataset' not in params) or (params['dataset'] is None):
        raise ValueError("Missing the required parameter `dataset` when calling `update_data_set`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `update_data_set`")
    resource_path = '/datasets/{userId}/{dataSet}'.replace('{format}', 'json')
    path_params = {}
    # BUGFIX: the path_params keys must match the template placeholders
    # '{userId}' and '{dataSet}' in resource_path — the client substitutes
    # '{<key>}' for each path_params entry. The previous keys ('owner',
    # 'dataset') never matched, so the literal '{userId}/{dataSet}' text
    # was sent in the request URL.
    if 'owner' in params:
        path_params['userId'] = params['owner']
    if 'dataset' in params:
        path_params['dataSet'] = params['dataset']
    query_params = {}
    # The API key travels as a request header.
    header_params = {}
    if 'api_key' in params:
        header_params['api_key'] = params['api_key']
    form_params = []
    local_var_files = {}
    # The DataSet payload is serialized as the JSON request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='GeneralStatus',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
| 40.533819
| 114
| 0.565836
| 4,296
| 38,953
| 4.915037
| 0.054935
| 0.024438
| 0.021217
| 0.027279
| 0.937911
| 0.927445
| 0.921288
| 0.911248
| 0.903955
| 0.897229
| 0
| 0.000435
| 0.351346
| 38,953
| 960
| 115
| 40.576042
| 0.83524
| 0.347136
| 0
| 0.774554
| 1
| 0
| 0.163554
| 0.031618
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037946
| false
| 0
| 0.015625
| 0
| 0.109375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.